ruby-mkrf-0.2.3.orig/README

= mkrf -- making C extensions for Ruby a bit easier

mkrf is a library for generating Rakefiles to build Ruby extension modules
written in C. It is intended as a replacement for mkmf. The major difference
between the two is that +mkrf+ builds you a Rakefile instead of a Makefile.

Major goals of mkrf include:

* easy code reuse of its Availability class
* simple, well-documented use of the Generator class

== Basic Usage

mkrf works similarly to mkmf in that a user writes an extension configuration
file and then runs it, generating a Rakefile in the current directory. With
mkmf it was customary to name the extension configuration file "extconf.rb".
With mkrf, you should name this file "mkrf_conf.rb".

In general, mkrf_conf.rb should be placed in the root directory of the
extension (e.g. PROJECT_ROOT/ext/name_of_module), and it expects, by default,
that the files to be compiled have a .c extension and reside in that same
directory. If your project contains multiple extension modules, each one gets
its own subdirectory under PROJECT_ROOT/ext/ and its own mkrf_conf.rb file
(a sketch of such a layout appears at the end of this README).

The most basic usage looks like the following, where the name of the extension
module being built is "libtrivial":

  require 'mkrf'

  Mkrf::Generator.new('libtrivial')

Extra arguments may be passed to the Rakefile generator in a block:

  Mkrf::Generator.new('libtrivial') do |g|
    g.logger.level = Logger::WARN
    g.include_library('z')
  end

Another example:

  Mkrf::Generator.new('libxml') do |g|
    g.include_library('socket', 'socket')
    g.include_header('libxml/xmlversion.h',
                     '/opt/include/libxml2',
                     '/usr/local/include/libxml2',
                     '/usr/include/libxml2')
  end

== Helpers

mkrf also comes with rakehelper.rb -- a module containing methods you may want
to use in your project's top-level Rakefile. The docs on using rakehelper do
not exist at the moment; for the time being, have a look at
examples/trivial/Rakefile to get an idea of how they're used.

== Credits

* Jim Weirich for writing Rake

== Licence

mkrf is available under an MIT-style license.

Copyright (c) 2006 Kevin Clark

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
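== Example: a project with two extension modules (illustrative)

The layout and mkrf_conf.rb below are an illustrative sketch only. The
extension names ("fast_csv", "native_sort"), the zlib check, and the file
contents are hypothetical and not part of mkrf itself; they simply combine
the calls shown above (Mkrf::Generator.new, include_header, include_library,
abort!) for a project with more than one extension:

  PROJECT_ROOT/
    Rakefile                  # top-level Rakefile for the whole project
    ext/
      fast_csv/
        mkrf_conf.rb          # configuration for the fast_csv extension
        fast_csv.c
      native_sort/
        mkrf_conf.rb          # configuration for the native_sort extension
        native_sort.c

  # ext/fast_csv/mkrf_conf.rb (hypothetical)
  require 'mkrf'

  Mkrf::Generator.new('fast_csv') do |g|
    # Log at WARN and above only, as in the example above.
    g.logger.level = Logger::WARN
    # Stop with a logged FATAL message if a required header is missing.
    g.abort! 'zlib.h is required' unless g.include_header('zlib.h')
    g.include_library('z')
  end

Running "ruby mkrf_conf.rb" inside each extension directory then generates a
Rakefile in that directory, just as in the single-extension case above.

The Availability class can also be used on its own. The sketch below is based
only on how test/unit/test_availability.rb (later in this package) exercises
it, so treat the exact calls as an approximation:

  require 'mkrf'

  avail = Mkrf::Availability.new(:includes => '/usr/local/include')
  avail.has_header?('zlib.h')      # => true or false
  avail.has_library?('z')          # => true or false
  avail.has_function?('printf')    # => true or false
  avail.find_executable('gcc')     # => path to gcc, or nil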
ruby-mkrf-0.2.3.orig/Rakefile

require 'rake'
require 'rake/testtask'
require 'rake/packagetask'
require 'rake/gempackagetask'
require 'rake/rdoctask'
require 'rubygems'

$:.unshift(File.dirname(__FILE__) + "/lib")
require 'mkrf'

PKG_NAME = 'mkrf'
PKG_VERSION = Mkrf::VERSION
PKG_FILE_NAME = "#{PKG_NAME}-#{PKG_VERSION}"
RELEASE_NAME = "REL #{PKG_VERSION}"

RUBY_FORGE_PROJECT = "mkrf"
RUBY_FORGE_USER = "kevinclark"

task :default => ["test:units"]

namespace :test do
  desc "Run basic tests"
  Rake::TestTask.new("units") { |t|
    t.pattern = 'test/unit/test_*.rb'
    t.verbose = true
    t.warning = true
  }

  desc "Run integration tests"
  Rake::TestTask.new("integration") { |t|
    t.pattern = 'test/integration/test_*.rb'
    t.verbose = true
    t.warning = true
  }

  namespace :samples do
    BASE_DIR = File.dirname(__FILE__) + '/test/sample_files'

    SAMPLE_DIRS = {
      :trivial => BASE_DIR + '/libtrivial/ext/',
      :syck => BASE_DIR + '/syck-0.55/ext/ruby/ext/syck/',
      :libxml => BASE_DIR + '/libxml-ruby-0.3.8/ext/xml/',
      :cpp_bang => BASE_DIR + '/cpp_bang/ext/'
    }

    task :default => [:all]

    desc "Try to compile all of the sample extensions"
    task :all => [:trivial, :libxml, :syck, :cpp_bang]

    desc "Try to compile a trivial extension"
    task :trivial do
      sh "cd #{SAMPLE_DIRS[:trivial]}; ruby extconf.rb; rake"
    end

    desc "Try to compile libxml"
    task :libxml do
      sh "cd #{SAMPLE_DIRS[:libxml]}; ruby extconf.rb; rake"
    end

    desc "Try to compile syck"
    task :syck do
      sh "cd #{SAMPLE_DIRS[:syck]}; ruby extconf.rb; rake"
    end

    desc "Try to compile cpp_bang"
    task :cpp_bang do
      sh "cd #{SAMPLE_DIRS[:cpp_bang]}; ruby mkrf_config.rb; rake"
    end

    desc "Clean up after sample tests"
    task :clobber do
      if ENV['PROJECT']
        if File.exist?(SAMPLE_DIRS[ENV['PROJECT'].to_sym] + "/Rakefile")
          sh "cd #{SAMPLE_DIRS[ENV['PROJECT'].to_sym]}; rake clobber; rm Rakefile"
        end
      else
        SAMPLE_DIRS.each_value do |test_dir|
          next unless File.exist?(test_dir + "/Rakefile")
          sh "cd #{test_dir}; rake clobber; rm Rakefile"
        end
      end
    end
  end
end

Rake::RDocTask.new do |rd|
  rd.main = "README"
  rd.rdoc_files.include("README", "lib/**/*.rb")
end

# Create compressed packages
spec = Gem::Specification.new do |s|
  s.platform = Gem::Platform::RUBY
  s.name = PKG_NAME
  s.summary = "Generate Rakefiles to Build C Extensions to Ruby"
  s.description = %q{This proposed replacement to mkmf generates Rakefiles to build C Extensions.}
  s.version = PKG_VERSION

  s.author = "Kevin Clark"
  s.email = "kevin.clark@gmail.com"
  s.rubyforge_project = RUBY_FORGE_PROJECT
  s.homepage = "http://glu.ttono.us"

  s.has_rdoc = true
  s.rdoc_options << '--main' << 'README' << '--title' << 'mkrf'
  s.requirements << 'rake'
  s.require_path = 'lib'
  s.autorequire = 'mkrf'

  s.extra_rdoc_files = [ "README", "MIT-LICENSE", "CHANGELOG" ]

  s.files = [ "Rakefile", "README", "CHANGELOG", "MIT-LICENSE" ]
  s.files = s.files + Dir.glob( "lib/**/*" ).delete_if { |item| item.include?( "\.svn" ) }
  s.files = s.files + Dir.glob( "test/**/*" ).delete_if { |item| item.include?( "\.svn" ) }
end

Rake::GemPackageTask.new(spec) do |p|
  p.gem_spec = spec
  p.need_tar = true
  p.need_zip = true
end

ruby-mkrf-0.2.3.orig/MIT-LICENSE

Copyright (c) 2006 Kevin Clark

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

ruby-mkrf-0.2.3.orig/test/unit/test_availability.rb

require File.dirname(__FILE__) + '/../abstract_unit'

class TestAvailability < Test::Unit::TestCase
  def setup
    @fixture_path = File.expand_path(File.join(File.dirname(__FILE__), '..', 'fixtures'))
    @avail = Mkrf::Availability.new(:includes => @fixture_path)
  end

  def teardown
    FileUtils.rm_f 'mkrf.log'
  end

  def test_has_library_should_return_true_when_lib_already_loaded
    @avail = Mkrf::Availability.new(:loaded_libs => ['sample_library'])
    assert @avail.has_library?('sample_library')
  end

  def test_has_library_should_fail_on_bogus_lib
    assert !@avail.has_library?('bogus_library')
  end

  def test_can_link
    @avail.send(:with_headers, 'stdio.h') do
      assert @avail.can_link?(@avail.send(:simple_reference, "printf"))
    end
  end

  def test_create_source
    assert_creates_file(Mkrf::Availability::TEMP_SOURCE_FILE) do
      @avail.send(:create_source, "puts 'Hello World!'")
    end

    source = File.open(Mkrf::Availability::TEMP_SOURCE_FILE).read
    assert_equal "puts 'Hello World!'", source
  ensure
    FileUtils.rm_f Mkrf::Availability::TEMP_SOURCE_FILE
  end

  def test_has_header_should_fail_on_bogus_header
    assert !@avail.has_header?('some_fake_header.h')
  end

  def test_has_header_should_work_with_basic_headers
    assert @avail.has_header?('stdmkrf.h')
  end

  def test_has_header_should_check_many_paths
    assert !@avail.has_header?('header_down_a_directory.h')
    assert @avail.has_header?('header_down_a_directory.h',
                              File.join(@fixture_path, 'down_a_directory'))
  end

  def test_has_header_should_add_define_with_valid_header
    assert @avail.has_header?('stdmkrf.h')
    assert @avail.defines.include?('HAVE_STDMKRF_H'), "Defines: #{@avail.defines.inspect}"
  end

  def test_include_header
    assert @avail.has_header?('stdmkrf.h')
    assert !@avail.headers.include?('stdmkrf.h')
    @avail.include_header('stdmkrf.h')
    assert @avail.headers.include?('stdmkrf.h')
  end

  # This should really use a trivial lib compiled in fixtures..
  def test_include_library
    assert @avail.has_library?('z')
    assert !@avail.loaded_libs.include?('z')
    @avail.include_library('z')
    assert @avail.loaded_libs.include?('z')
  end

  def test_method_missing_should_go_down_chain_when_not_catching_stackable_attributes
    assert_raises(NoMethodError) { @avail.not_a_stackable_attribute }
    assert_raises(NoMethodError) { @avail.with_not_a_stackable_attribute }
  end

  def test_find_executable_should_return_nil_when_not_found
    assert_nil @avail.find_executable('fake_executable')
  end

  def test_find_executable_should_default_to_search_env_path
    old_path = ENV['PATH']
    ENV['PATH'] = @fixture_path
    expected = File.join(@fixture_path, 'some_binary')
    assert_equal expected, @avail.find_executable('some_binary')
  ensure
    ENV['PATH'] = old_path
  end

  def test_find_executable_should_search_given_paths_if_supplied
    expected = File.join(@fixture_path, 'some_binary')
    assert_equal expected, @avail.find_executable('some_binary', @fixture_path)
  end

  def test_logging
    @avail.logger.level = Logger::INFO

    assert @avail.include_library('z')
    assert @avail.include_library('z')
    assert !@avail.include_library('bogus_lib')
    assert !@avail.include_header('some_fake_header.h')
    assert @avail.include_header('stdio.h')
    assert !@avail.has_function?('blah_blah_blah')
    assert @avail.has_function?('printf')

    source = File.open('mkrf.log').read

    [
      'Checking for library: z',
      'Library found: z',
      'Library already loaded: z',
      'Library not found: bogus_lib',
      'Header not found: some_fake_header.h',
      'Header found: stdio.h',
      'Function not found: blah_blah_blah()',
      'Function found: printf()'
    ].each do |log_items|
      assert_match log_items, source
    end
  end
end

class TestAvailabilityDefaults < Test::Unit::TestCase
  def setup
    @avail = Mkrf::Availability.new
    @config = Config::CONFIG
  end

  def test_default_libs_should_be_from_rbconfig
    assert_equal @config["LIBS"].chomp(" "), @avail.library_compile_string
  end

  def test_default_compiler_should_be_from_rbconfig
    assert_equal @config["CC"], @avail.send(:instance_variable_get, :@compiler)
  end

  def test_default_include_dir_should_be_from_rbconfig
    expected = [Config::CONFIG['includedir'], Config::CONFIG["archdir"],
                Config::CONFIG['sitelibdir'], "."]
    assert_equal expected, @avail.send(:instance_variable_get, :@includes)
  end
end

ruby-mkrf-0.2.3.orig/test/unit/test_generator.rb

require File.dirname(__FILE__) + '/../abstract_unit'
require 'rbconfig'

# require 'rubygems'
# require 'mocha'
# require 'stubba'

# Stub this out so we don't overwrite our test rakefile
module Mkrf
  class Generator
    def write_rakefile(file = "Rakefile")
    end

    attr_reader :available
  end
end

module Kernel
  def exit(*args)
  end
end

class TestGenerator < Test::Unit::TestCase
  def setup
    FileUtils.rm_f 'mkrf.log'
  end

  def test_default_sources
    g = Mkrf::Generator.new('testlib')
    assert_equal ["'*.c'"], g.sources, "Default sources incorrect"
  end

  def test_additional_code
    generator = Mkrf::Generator.new('testlib') do |g|
      g.additional_code = spec_code
    end

    assert_match spec_code, generator.rakefile_contents
  end

  def test_logging_levels
    generator = Mkrf::Generator.new('testlib') do |g|
      g.logger.level = Logger::WARN
      g.include_header 'stdio.h'
      g.include_header 'fake_header.h'
    end

    logs = File.open('mkrf.log').read
    assert_no_match(/INFO/, logs)
    assert_match(/WARN/, logs)
  end

  def test_logging_defaults_to_info_level
    generator = Mkrf::Generator.new('testlib') do |g|
      g.include_header 'stdio.h'
      g.include_header 'fake_header.h'
    end

    logs = File.open('mkrf.log').read
    assert_match(/INFO/, logs)
    assert_match(/WARN/, logs)
  end

  def test_abort_logs_fatal_error
    generator = Mkrf::Generator.new('testlib') do |g|
      g.abort! "Fake header wasn't found." unless g.include_header 'fake_header.h'
    end

    logs = File.open('mkrf.log').read
    assert_match(/FATAL/, logs)
    assert_match("Fake header wasn't found.", logs)
  end

  # Need to figure out how to test this.. mocking doesn't seem to work
  # def test_abort_exits
  #   generator = Mkrf::Generator.new('testlib') do |g|
  #     g.abort! "Aborting!"
  #   end
  # end

  def test_availability_options_accessible_in_initialize
    generator = Mkrf::Generator.new('testlib', ['lib/*.c'], {:loaded_libs => 'static_ruby'})
    assert_equal ['static_ruby'], generator.available.loaded_libs
  end

  def test_additional_objects
    obj_string = 'somedir/somefile.o'

    generator = Mkrf::Generator.new('testlib') do |g|
      g.objects = obj_string
    end

    assert_match obj_string, generator.rakefile_contents
  end

  def test_ldshared
    ldshared = 'this_normally_isnt_here'

    generator = Mkrf::Generator.new('testlib') do |g|
      g.ldshared = ldshared
    end

    assert_match Regexp.new("LDSHARED = .*#{ldshared}.*"), generator.rakefile_contents
  end

  def test_cflags
    cflags = 'this_normally_isnt_here'

    generator = Mkrf::Generator.new('testlib') do |g|
      g.cflags = cflags
    end

    assert_match Regexp.new("CFLAGS = .*#{cflags}.*"), generator.rakefile_contents
  end

  def test_defines_compile_string
    generator = Mkrf::Generator.new('testlib') do |g|
      g.add_define 'HAVE_UNIX'
      g.include_header 'stdio.h'
    end

    assert_match(/HAVE_UNIX/, generator.defines_compile_string)
    assert_match(/HAVE_STDIO_H/, generator.defines_compile_string)
  end

  protected

  def spec_code
    <<-SPEC
    # Create compressed packages
    spec = Gem::Specification.new do |s|
      s.platform = Gem::Platform::RUBY
      s.name = PKG_NAME
      s.summary = "Generate Rakefiles to Build C Extensions to Ruby"
      s.description = %q{This proposed replacement to mkmf generates Rakefiles to build C Extensions.}
      s.version = PKG_VERSION

      s.author = "Kevin Clark"
      s.email = "kevin.clark@gmail.com"
      s.rubyforge_project = RUBY_FORGE_PROJECT
      s.homepage = "http://glu.ttono.us"

      s.has_rdoc = true
      s.requirements << 'rake'
      s.require_path = 'lib'
      s.autorequire = 'mkrf'

      s.files = [ "Rakefile", "README", "CHANGELOG", "MIT-LICENSE" ]
      s.files = s.files + Dir.glob( "lib/**/*" ).delete_if { |item| item.include?( "\.svn" ) }
      s.files = s.files + Dir.glob( "test/**/*" ).delete_if { |item| item.include?( "\.svn" ) }
    end

    Rake::GemPackageTask.new(spec) do |p|
      p.gem_spec = spec
      p.need_tar = true
      p.need_zip = true
    end
    SPEC
  end
end

class TestGeneratorDefaults < Test::Unit::TestCase
  def setup
    @generator = Mkrf::Generator.new('trivial_lib')
  end

  def test_should_default_objects_to_empty_string
    assert_equal '', @generator.objects
  end

  def test_should_default_ldshared_to_empty_string
    assert_equal '', @generator.ldshared
  end

  def test_should_default_cflags_properly
    expected = "#{Config::CONFIG['CCDLFLAGS']} #{Config::CONFIG['CFLAGS']} #{Config::CONFIG['ARCH_FLAG']}"
    assert_equal expected, @generator.cflags
  end
end

ruby-mkrf-0.2.3.orig/test/integration/test_sample_projects.rb

require File.dirname(__FILE__) + '/../abstract_unit'

class TestSampleProjects < Test::Unit::TestCase
  SAMPLES_DIR = File.dirname(__FILE__) + '/../sample_files'

  SAMPLE_LIBS = {
    :trivial => "/libtrivial/ext/libtrivial.#{Config::CONFIG['DLEXT']}",
    :syck => "/syck-0.55/ext/ruby/ext/syck/syck.#{Config::CONFIG['DLEXT']}",
    :libxml =>
"/libxml-ruby-0.3.8/ext/xml/libxml_so.#{Config::CONFIG['DLEXT']}" } # Set to true for full command line output @@debug = false SAMPLE_LIBS.each do |k,v| define_method("test_that_#{k}_compiles") do silence_command_line do system("rake test:samples:clobber PROJECT=#{k}") end assert_creates_file(SAMPLES_DIR + v) do silence_command_line do system("rake test:samples:#{k}") end end end end endruby-mkrf-0.2.3.orig/test/sample_files/0000755000000000000000000000000011672453175016524 5ustar rootrootruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/0000755000000000000000000000000011672453175020062 5ustar rootrootruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/COPYING0000644000000000000000000000527511672453175021126 0ustar rootrootCopyright (c) 2003 why the lucky stiff This software is subject to either of two licenses (BSD or D&R), which you can choose from in your use of the code. The terms for each of these licenses is listed below: BSD License =========== Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. D&R (Death and Repudiation) License =================================== This software may not be used directly by any living being. ANY use of this software (even perfectly legitimate and non-commercial uses) until after death is explicitly restricted. Any living being using (or attempting to use) this software will be punished to the fullest extent of the law. For your protection, corpses will not be punished. We respectfully request that you submit your uses (revisions, uses, distributions, uses, etc.) to your children, who may vicariously perform these uses on your behalf. If you use this software and you are found to be not dead, you will be punished to the fullest extent of the law. If you are found to be a ghost or angel, you will be punished to the fullest extent of the law. After your following the terms of this license, the author has vowed to repudiate your claim, meaning that the validity of this contract will no longer be recognized. This license will be unexpectedly revoked (at a time which is designated to be most inconvenient) and involved heirs will be punished to the fullest extent of the law. Furthermore, if any parties (related or non-related) escape the punishments outlined herein, they will be severely punished to the fullest extent of a new revised law that (1) expands the statement "fullest extent of the law" to encompass an infinite duration of infinite punishments and (2) exacts said punishments upon all parties (related or non-related). 
ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/configure0000644000000000000000000060536111672453175022001 0ustar rootroot#! /bin/sh # Guess values for system-dependent variables and create Makefiles. # Generated by GNU Autoconf 2.59 for syck 0.54. # # Copyright (C) 2003 Free Software Foundation, Inc. # This configure script is free software; the Free Software Foundation # gives unlimited permission to copy, distribute and modify it. ## --------------------- ## ## M4sh Initialization. ## ## --------------------- ## # Be Bourne compatible if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then emulate sh NULLCMD=: # Zsh 3.x and 4.x performs word splitting on ${1+"$@"}, which # is contrary to our usage. Disable this feature. alias -g '${1+"$@"}'='"$@"' elif test -n "${BASH_VERSION+set}" && (set -o posix) >/dev/null 2>&1; then set -o posix fi DUALCASE=1; export DUALCASE # for MKS sh # Support unset when possible. if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then as_unset=unset else as_unset=false fi # Work around bugs in pre-3.0 UWIN ksh. $as_unset ENV MAIL MAILPATH PS1='$ ' PS2='> ' PS4='+ ' # NLS nuisances. for as_var in \ LANG LANGUAGE LC_ADDRESS LC_ALL LC_COLLATE LC_CTYPE LC_IDENTIFICATION \ LC_MEASUREMENT LC_MESSAGES LC_MONETARY LC_NAME LC_NUMERIC LC_PAPER \ LC_TELEPHONE LC_TIME do if (set +x; test -z "`(eval $as_var=C; export $as_var) 2>&1`"); then eval $as_var=C; export $as_var else $as_unset $as_var fi done # Required to use basename. if expr a : '\(a\)' >/dev/null 2>&1; then as_expr=expr else as_expr=false fi if (basename /) >/dev/null 2>&1 && test "X`basename / 2>&1`" = "X/"; then as_basename=basename else as_basename=false fi # Name of the executable. as_me=`$as_basename "$0" || $as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ X"$0" : 'X\(//\)$' \| \ X"$0" : 'X\(/\)$' \| \ . : '\(.\)' 2>/dev/null || echo X/"$0" | sed '/^.*\/\([^/][^/]*\)\/*$/{ s//\1/; q; } /^X\/\(\/\/\)$/{ s//\1/; q; } /^X\/\(\/\).*/{ s//\1/; q; } s/.*/./; q'` # PATH needs CR, and LINENO needs CR and PATH. # Avoid depending upon Character Ranges. as_cr_letters='abcdefghijklmnopqrstuvwxyz' as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' as_cr_Letters=$as_cr_letters$as_cr_LETTERS as_cr_digits='0123456789' as_cr_alnum=$as_cr_Letters$as_cr_digits # The user is always right. if test "${PATH_SEPARATOR+set}" != set; then echo "#! /bin/sh" >conf$$.sh echo "exit 0" >>conf$$.sh chmod +x conf$$.sh if (PATH="/nonexistent;."; conf$$.sh) >/dev/null 2>&1; then PATH_SEPARATOR=';' else PATH_SEPARATOR=: fi rm -f conf$$.sh fi as_lineno_1=$LINENO as_lineno_2=$LINENO as_lineno_3=`(expr $as_lineno_1 + 1) 2>/dev/null` test "x$as_lineno_1" != "x$as_lineno_2" && test "x$as_lineno_3" = "x$as_lineno_2" || { # Find who we are. Look in the path if we contain no path at all # relative or not. case $0 in *[\\/]* ) as_myself=$0 ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break done ;; esac # We did not find ourselves, most probably we were run as `sh COMMAND' # in which case we are not to be found in the path. if test "x$as_myself" = x; then as_myself=$0 fi if test ! -f "$as_myself"; then { echo "$as_me: error: cannot find myself; rerun with an absolute path" >&2 { (exit 1); exit 1; }; } fi case $CONFIG_SHELL in '') as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for as_base in sh bash ksh sh5; do case $as_dir in /*) if ("$as_dir/$as_base" -c ' as_lineno_1=$LINENO as_lineno_2=$LINENO as_lineno_3=`(expr $as_lineno_1 + 1) 2>/dev/null` test "x$as_lineno_1" != "x$as_lineno_2" && test "x$as_lineno_3" = "x$as_lineno_2" ') 2>/dev/null; then $as_unset BASH_ENV || test "${BASH_ENV+set}" != set || { BASH_ENV=; export BASH_ENV; } $as_unset ENV || test "${ENV+set}" != set || { ENV=; export ENV; } CONFIG_SHELL=$as_dir/$as_base export CONFIG_SHELL exec "$CONFIG_SHELL" "$0" ${1+"$@"} fi;; esac done done ;; esac # Create $as_me.lineno as a copy of $as_myself, but with $LINENO # uniformly replaced by the line number. The first 'sed' inserts a # line-number line before each line; the second 'sed' does the real # work. The second script uses 'N' to pair each line-number line # with the numbered line, and appends trailing '-' during # substitution so that $LINENO is not a special case at line end. # (Raja R Harinath suggested sed '=', and Paul Eggert wrote the # second 'sed' script. Blame Lee E. McMahon for sed's syntax. :-) sed '=' <$as_myself | sed ' N s,$,-, : loop s,^\(['$as_cr_digits']*\)\(.*\)[$]LINENO\([^'$as_cr_alnum'_]\),\1\2\1\3, t loop s,-$,, s,^['$as_cr_digits']*\n,, ' >$as_me.lineno && chmod +x $as_me.lineno || { echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2 { (exit 1); exit 1; }; } # Don't try to exec as it changes $[0], causing all sort of problems # (the dirname of $[0] is not the place where we might find the # original and so on. Autoconf is especially sensible to this). . ./$as_me.lineno # Exit status is that of the last command. exit } case `echo "testing\c"; echo 1,2,3`,`echo -n testing; echo 1,2,3` in *c*,-n*) ECHO_N= ECHO_C=' ' ECHO_T=' ' ;; *c*,* ) ECHO_N=-n ECHO_C= ECHO_T= ;; *) ECHO_N= ECHO_C='\c' ECHO_T= ;; esac if expr a : '\(a\)' >/dev/null 2>&1; then as_expr=expr else as_expr=false fi rm -f conf$$ conf$$.exe conf$$.file echo >conf$$.file if ln -s conf$$.file conf$$ 2>/dev/null; then # We could just check for DJGPP; but this test a) works b) is more generic # and c) will remain valid once DJGPP supports symlinks (DJGPP 2.04). if test -f conf$$.exe; then # Don't use ln at all; we don't have any links as_ln_s='cp -p' else as_ln_s='ln -s' fi elif ln conf$$.file conf$$ 2>/dev/null; then as_ln_s=ln else as_ln_s='cp -p' fi rm -f conf$$ conf$$.exe conf$$.file if mkdir -p . 2>/dev/null; then as_mkdir_p=: else test -d ./-p && rmdir ./-p as_mkdir_p=false fi as_executable_p="test -f" # Sed expression to map a string onto a valid CPP name. as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" # Sed expression to map a string onto a valid variable name. as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" # IFS # We need space, tab and new line, in precisely that order. as_nl=' ' IFS=" $as_nl" # CDPATH. $as_unset CDPATH # Name of the host. # hostname on some systems (SVR3.2, Linux) returns a bogus exit status, # so uname gets run too. ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q` exec 6>&1 # # Initializations. # ac_default_prefix=/usr/local ac_config_libobj_dir=. cross_compiling=no subdirs= MFLAGS= MAKEFLAGS= SHELL=${CONFIG_SHELL-/bin/sh} # Maximum number of lines to put in a shell here document. # This variable seems obsolete. It should probably be removed, and # only ac_max_sed_lines should be used. : ${ac_max_here_lines=38} # Identity of this package. 
PACKAGE_NAME='syck' PACKAGE_TARNAME='syck' PACKAGE_VERSION='0.54' PACKAGE_STRING='syck 0.54' PACKAGE_BUGREPORT='' # Factoring default headers for most tests. ac_includes_default="\ #include #if HAVE_SYS_TYPES_H # include #endif #if HAVE_SYS_STAT_H # include #endif #if STDC_HEADERS # include # include #else # if HAVE_STDLIB_H # include # endif #endif #if HAVE_STRING_H # if !STDC_HEADERS && HAVE_MEMORY_H # include # endif # include #endif #if HAVE_STRINGS_H # include #endif #if HAVE_INTTYPES_H # include #else # if HAVE_STDINT_H # include # endif #endif #if HAVE_UNISTD_H # include #endif" ac_subst_vars='SHELL PATH_SEPARATOR PACKAGE_NAME PACKAGE_TARNAME PACKAGE_VERSION PACKAGE_STRING PACKAGE_BUGREPORT exec_prefix prefix program_transform_name bindir sbindir libexecdir datadir sysconfdir sharedstatedir localstatedir libdir includedir oldincludedir infodir mandir build_alias host_alias target_alias DEFS ECHO_C ECHO_N ECHO_T LIBS INSTALL_PROGRAM INSTALL_SCRIPT INSTALL_DATA CYGPATH_W PACKAGE VERSION ACLOCAL AUTOCONF AUTOMAKE AUTOHEADER MAKEINFO install_sh STRIP ac_ct_STRIP INSTALL_STRIP_PROGRAM mkdir_p AWK SET_MAKE am__leading_dot AMTAR am__tar am__untar LN_S RANLIB ac_ct_RANLIB YACC LEX CC CFLAGS LDFLAGS CPPFLAGS ac_ct_CC EXEEXT OBJEXT DEPDIR am__include am__quote AMDEP_TRUE AMDEP_FALSE AMDEPBACKSLASH CCDEPMODE am__fastdepCC_TRUE am__fastdepCC_FALSE LEXLIB LEX_OUTPUT_ROOT CPP EGREP LIBOBJS LTLIBOBJS' ac_subst_files='' # Initialize some variables set by options. ac_init_help= ac_init_version=false # The variables have the same names as the options, with # dashes changed to underlines. cache_file=/dev/null exec_prefix=NONE no_create= no_recursion= prefix=NONE program_prefix=NONE program_suffix=NONE program_transform_name=s,x,x, silent= site= srcdir= verbose= x_includes=NONE x_libraries=NONE # Installation directory options. # These are left unexpanded so users can "make install exec_prefix=/foo" # and all the variables that are supposed to be based on exec_prefix # by default will actually change. # Use braces instead of parens because sh, perl, etc. also accept them. bindir='${exec_prefix}/bin' sbindir='${exec_prefix}/sbin' libexecdir='${exec_prefix}/libexec' datadir='${prefix}/share' sysconfdir='${prefix}/etc' sharedstatedir='${prefix}/com' localstatedir='${prefix}/var' libdir='${exec_prefix}/lib' includedir='${prefix}/include' oldincludedir='/usr/include' infodir='${prefix}/info' mandir='${prefix}/man' ac_prev= for ac_option do # If the previous option needs an argument, assign it. if test -n "$ac_prev"; then eval "$ac_prev=\$ac_option" ac_prev= continue fi ac_optarg=`expr "x$ac_option" : 'x[^=]*=\(.*\)'` # Accept the important Cygnus configure options, so we can diagnose typos. 
case $ac_option in -bindir | --bindir | --bindi | --bind | --bin | --bi) ac_prev=bindir ;; -bindir=* | --bindir=* | --bindi=* | --bind=* | --bin=* | --bi=*) bindir=$ac_optarg ;; -build | --build | --buil | --bui | --bu) ac_prev=build_alias ;; -build=* | --build=* | --buil=* | --bui=* | --bu=*) build_alias=$ac_optarg ;; -cache-file | --cache-file | --cache-fil | --cache-fi \ | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c) ac_prev=cache_file ;; -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \ | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*) cache_file=$ac_optarg ;; --config-cache | -C) cache_file=config.cache ;; -datadir | --datadir | --datadi | --datad | --data | --dat | --da) ac_prev=datadir ;; -datadir=* | --datadir=* | --datadi=* | --datad=* | --data=* | --dat=* \ | --da=*) datadir=$ac_optarg ;; -disable-* | --disable-*) ac_feature=`expr "x$ac_option" : 'x-*disable-\(.*\)'` # Reject names that are not valid shell variable names. expr "x$ac_feature" : ".*[^-_$as_cr_alnum]" >/dev/null && { echo "$as_me: error: invalid feature name: $ac_feature" >&2 { (exit 1); exit 1; }; } ac_feature=`echo $ac_feature | sed 's/-/_/g'` eval "enable_$ac_feature=no" ;; -enable-* | --enable-*) ac_feature=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'` # Reject names that are not valid shell variable names. expr "x$ac_feature" : ".*[^-_$as_cr_alnum]" >/dev/null && { echo "$as_me: error: invalid feature name: $ac_feature" >&2 { (exit 1); exit 1; }; } ac_feature=`echo $ac_feature | sed 's/-/_/g'` case $ac_option in *=*) ac_optarg=`echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"`;; *) ac_optarg=yes ;; esac eval "enable_$ac_feature='$ac_optarg'" ;; -exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \ | --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \ | --exec | --exe | --ex) ac_prev=exec_prefix ;; -exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \ | --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \ | --exec=* | --exe=* | --ex=*) exec_prefix=$ac_optarg ;; -gas | --gas | --ga | --g) # Obsolete; use --with-gas. 
with_gas=yes ;; -help | --help | --hel | --he | -h) ac_init_help=long ;; -help=r* | --help=r* | --hel=r* | --he=r* | -hr*) ac_init_help=recursive ;; -help=s* | --help=s* | --hel=s* | --he=s* | -hs*) ac_init_help=short ;; -host | --host | --hos | --ho) ac_prev=host_alias ;; -host=* | --host=* | --hos=* | --ho=*) host_alias=$ac_optarg ;; -includedir | --includedir | --includedi | --included | --include \ | --includ | --inclu | --incl | --inc) ac_prev=includedir ;; -includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \ | --includ=* | --inclu=* | --incl=* | --inc=*) includedir=$ac_optarg ;; -infodir | --infodir | --infodi | --infod | --info | --inf) ac_prev=infodir ;; -infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*) infodir=$ac_optarg ;; -libdir | --libdir | --libdi | --libd) ac_prev=libdir ;; -libdir=* | --libdir=* | --libdi=* | --libd=*) libdir=$ac_optarg ;; -libexecdir | --libexecdir | --libexecdi | --libexecd | --libexec \ | --libexe | --libex | --libe) ac_prev=libexecdir ;; -libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \ | --libexe=* | --libex=* | --libe=*) libexecdir=$ac_optarg ;; -localstatedir | --localstatedir | --localstatedi | --localstated \ | --localstate | --localstat | --localsta | --localst \ | --locals | --local | --loca | --loc | --lo) ac_prev=localstatedir ;; -localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \ | --localstate=* | --localstat=* | --localsta=* | --localst=* \ | --locals=* | --local=* | --loca=* | --loc=* | --lo=*) localstatedir=$ac_optarg ;; -mandir | --mandir | --mandi | --mand | --man | --ma | --m) ac_prev=mandir ;; -mandir=* | --mandir=* | --mandi=* | --mand=* | --man=* | --ma=* | --m=*) mandir=$ac_optarg ;; -nfp | --nfp | --nf) # Obsolete; use --without-fp. 
with_fp=no ;; -no-create | --no-create | --no-creat | --no-crea | --no-cre \ | --no-cr | --no-c | -n) no_create=yes ;; -no-recursion | --no-recursion | --no-recursio | --no-recursi \ | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r) no_recursion=yes ;; -oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \ | --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \ | --oldin | --oldi | --old | --ol | --o) ac_prev=oldincludedir ;; -oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \ | --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \ | --oldin=* | --oldi=* | --old=* | --ol=* | --o=*) oldincludedir=$ac_optarg ;; -prefix | --prefix | --prefi | --pref | --pre | --pr | --p) ac_prev=prefix ;; -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*) prefix=$ac_optarg ;; -program-prefix | --program-prefix | --program-prefi | --program-pref \ | --program-pre | --program-pr | --program-p) ac_prev=program_prefix ;; -program-prefix=* | --program-prefix=* | --program-prefi=* \ | --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*) program_prefix=$ac_optarg ;; -program-suffix | --program-suffix | --program-suffi | --program-suff \ | --program-suf | --program-su | --program-s) ac_prev=program_suffix ;; -program-suffix=* | --program-suffix=* | --program-suffi=* \ | --program-suff=* | --program-suf=* | --program-su=* | --program-s=*) program_suffix=$ac_optarg ;; -program-transform-name | --program-transform-name \ | --program-transform-nam | --program-transform-na \ | --program-transform-n | --program-transform- \ | --program-transform | --program-transfor \ | --program-transfo | --program-transf \ | --program-trans | --program-tran \ | --progr-tra | --program-tr | --program-t) ac_prev=program_transform_name ;; -program-transform-name=* | --program-transform-name=* \ | --program-transform-nam=* | --program-transform-na=* \ | --program-transform-n=* | --program-transform-=* \ | --program-transform=* | --program-transfor=* \ | --program-transfo=* | --program-transf=* \ | --program-trans=* | --program-tran=* \ | --progr-tra=* | --program-tr=* | --program-t=*) program_transform_name=$ac_optarg ;; -q | -quiet | --quiet | --quie | --qui | --qu | --q \ | -silent | --silent | --silen | --sile | --sil) silent=yes ;; -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb) ac_prev=sbindir ;; -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \ | --sbi=* | --sb=*) sbindir=$ac_optarg ;; -sharedstatedir | --sharedstatedir | --sharedstatedi \ | --sharedstated | --sharedstate | --sharedstat | --sharedsta \ | --sharedst | --shareds | --shared | --share | --shar \ | --sha | --sh) ac_prev=sharedstatedir ;; -sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \ | --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \ | --sharedst=* | --shareds=* | --shared=* | --share=* | --shar=* \ | --sha=* | --sh=*) sharedstatedir=$ac_optarg ;; -site | --site | --sit) ac_prev=site ;; -site=* | --site=* | --sit=*) site=$ac_optarg ;; -srcdir | --srcdir | --srcdi | --srcd | --src | --sr) ac_prev=srcdir ;; -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*) srcdir=$ac_optarg ;; -sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \ | --syscon | --sysco | --sysc | --sys | --sy) ac_prev=sysconfdir ;; -sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \ | --syscon=* | --sysco=* | --sysc=* | --sys=* | --sy=*) sysconfdir=$ac_optarg ;; 
-target | --target | --targe | --targ | --tar | --ta | --t) ac_prev=target_alias ;; -target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*) target_alias=$ac_optarg ;; -v | -verbose | --verbose | --verbos | --verbo | --verb) verbose=yes ;; -version | --version | --versio | --versi | --vers | -V) ac_init_version=: ;; -with-* | --with-*) ac_package=`expr "x$ac_option" : 'x-*with-\([^=]*\)'` # Reject names that are not valid shell variable names. expr "x$ac_package" : ".*[^-_$as_cr_alnum]" >/dev/null && { echo "$as_me: error: invalid package name: $ac_package" >&2 { (exit 1); exit 1; }; } ac_package=`echo $ac_package| sed 's/-/_/g'` case $ac_option in *=*) ac_optarg=`echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"`;; *) ac_optarg=yes ;; esac eval "with_$ac_package='$ac_optarg'" ;; -without-* | --without-*) ac_package=`expr "x$ac_option" : 'x-*without-\(.*\)'` # Reject names that are not valid shell variable names. expr "x$ac_package" : ".*[^-_$as_cr_alnum]" >/dev/null && { echo "$as_me: error: invalid package name: $ac_package" >&2 { (exit 1); exit 1; }; } ac_package=`echo $ac_package | sed 's/-/_/g'` eval "with_$ac_package=no" ;; --x) # Obsolete; use --with-x. with_x=yes ;; -x-includes | --x-includes | --x-include | --x-includ | --x-inclu \ | --x-incl | --x-inc | --x-in | --x-i) ac_prev=x_includes ;; -x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \ | --x-incl=* | --x-inc=* | --x-in=* | --x-i=*) x_includes=$ac_optarg ;; -x-libraries | --x-libraries | --x-librarie | --x-librari \ | --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l) ac_prev=x_libraries ;; -x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \ | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*) x_libraries=$ac_optarg ;; -*) { echo "$as_me: error: unrecognized option: $ac_option Try \`$0 --help' for more information." >&2 { (exit 1); exit 1; }; } ;; *=*) ac_envvar=`expr "x$ac_option" : 'x\([^=]*\)='` # Reject names that are not valid shell variable names. expr "x$ac_envvar" : ".*[^_$as_cr_alnum]" >/dev/null && { echo "$as_me: error: invalid variable name: $ac_envvar" >&2 { (exit 1); exit 1; }; } ac_optarg=`echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` eval "$ac_envvar='$ac_optarg'" export $ac_envvar ;; *) # FIXME: should be removed in autoconf 3.0. echo "$as_me: WARNING: you should use --build, --host, --target" >&2 expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null && echo "$as_me: WARNING: invalid host type: $ac_option" >&2 : ${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option} ;; esac done if test -n "$ac_prev"; then ac_option=--`echo $ac_prev | sed 's/_/-/g'` { echo "$as_me: error: missing argument to $ac_option" >&2 { (exit 1); exit 1; }; } fi # Be sure to have absolute paths. for ac_var in exec_prefix prefix do eval ac_val=$`echo $ac_var` case $ac_val in [\\/$]* | ?:[\\/]* | NONE | '' ) ;; *) { echo "$as_me: error: expected an absolute directory name for --$ac_var: $ac_val" >&2 { (exit 1); exit 1; }; };; esac done # Be sure to have absolute paths. for ac_var in bindir sbindir libexecdir datadir sysconfdir sharedstatedir \ localstatedir libdir includedir oldincludedir infodir mandir do eval ac_val=$`echo $ac_var` case $ac_val in [\\/$]* | ?:[\\/]* ) ;; *) { echo "$as_me: error: expected an absolute directory name for --$ac_var: $ac_val" >&2 { (exit 1); exit 1; }; };; esac done # There might be people who depend on the old broken behavior: `$host' # used to hold the argument of --host etc. 
# FIXME: To remove some day. build=$build_alias host=$host_alias target=$target_alias # FIXME: To remove some day. if test "x$host_alias" != x; then if test "x$build_alias" = x; then cross_compiling=maybe echo "$as_me: WARNING: If you wanted to set the --build type, don't use --host. If a cross compiler is detected then cross compile mode will be used." >&2 elif test "x$build_alias" != "x$host_alias"; then cross_compiling=yes fi fi ac_tool_prefix= test -n "$host_alias" && ac_tool_prefix=$host_alias- test "$silent" = yes && exec 6>/dev/null # Find the source files, if location was not specified. if test -z "$srcdir"; then ac_srcdir_defaulted=yes # Try the directory containing this script, then its parent. ac_confdir=`(dirname "$0") 2>/dev/null || $as_expr X"$0" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$0" : 'X\(//\)[^/]' \| \ X"$0" : 'X\(//\)$' \| \ X"$0" : 'X\(/\)' \| \ . : '\(.\)' 2>/dev/null || echo X"$0" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } /^X\(\/\/\)[^/].*/{ s//\1/; q; } /^X\(\/\/\)$/{ s//\1/; q; } /^X\(\/\).*/{ s//\1/; q; } s/.*/./; q'` srcdir=$ac_confdir if test ! -r $srcdir/$ac_unique_file; then srcdir=.. fi else ac_srcdir_defaulted=no fi if test ! -r $srcdir/$ac_unique_file; then if test "$ac_srcdir_defaulted" = yes; then { echo "$as_me: error: cannot find sources ($ac_unique_file) in $ac_confdir or .." >&2 { (exit 1); exit 1; }; } else { echo "$as_me: error: cannot find sources ($ac_unique_file) in $srcdir" >&2 { (exit 1); exit 1; }; } fi fi (cd $srcdir && test -r ./$ac_unique_file) 2>/dev/null || { echo "$as_me: error: sources are in $srcdir, but \`cd $srcdir' does not work" >&2 { (exit 1); exit 1; }; } srcdir=`echo "$srcdir" | sed 's%\([^\\/]\)[\\/]*$%\1%'` ac_env_build_alias_set=${build_alias+set} ac_env_build_alias_value=$build_alias ac_cv_env_build_alias_set=${build_alias+set} ac_cv_env_build_alias_value=$build_alias ac_env_host_alias_set=${host_alias+set} ac_env_host_alias_value=$host_alias ac_cv_env_host_alias_set=${host_alias+set} ac_cv_env_host_alias_value=$host_alias ac_env_target_alias_set=${target_alias+set} ac_env_target_alias_value=$target_alias ac_cv_env_target_alias_set=${target_alias+set} ac_cv_env_target_alias_value=$target_alias ac_env_CC_set=${CC+set} ac_env_CC_value=$CC ac_cv_env_CC_set=${CC+set} ac_cv_env_CC_value=$CC ac_env_CFLAGS_set=${CFLAGS+set} ac_env_CFLAGS_value=$CFLAGS ac_cv_env_CFLAGS_set=${CFLAGS+set} ac_cv_env_CFLAGS_value=$CFLAGS ac_env_LDFLAGS_set=${LDFLAGS+set} ac_env_LDFLAGS_value=$LDFLAGS ac_cv_env_LDFLAGS_set=${LDFLAGS+set} ac_cv_env_LDFLAGS_value=$LDFLAGS ac_env_CPPFLAGS_set=${CPPFLAGS+set} ac_env_CPPFLAGS_value=$CPPFLAGS ac_cv_env_CPPFLAGS_set=${CPPFLAGS+set} ac_cv_env_CPPFLAGS_value=$CPPFLAGS ac_env_CPP_set=${CPP+set} ac_env_CPP_value=$CPP ac_cv_env_CPP_set=${CPP+set} ac_cv_env_CPP_value=$CPP # # Report the --help message. # if test "$ac_init_help" = "long"; then # Omit some internal or obsolete options to make the list less imposing. # This message is too long to be a string in the A/UX 3.1 sh. cat <<_ACEOF \`configure' configures syck 0.54 to adapt to many kinds of systems. Usage: $0 [OPTION]... [VAR=VALUE]... To assign environment variables (e.g., CC, CFLAGS...), specify them as VAR=VALUE. See below for descriptions of some of the useful variables. Defaults for the options are specified in brackets. 
Configuration: -h, --help display this help and exit --help=short display options specific to this package --help=recursive display the short help of all the included packages -V, --version display version information and exit -q, --quiet, --silent do not print \`checking...' messages --cache-file=FILE cache test results in FILE [disabled] -C, --config-cache alias for \`--cache-file=config.cache' -n, --no-create do not create output files --srcdir=DIR find the sources in DIR [configure dir or \`..'] _ACEOF cat <<_ACEOF Installation directories: --prefix=PREFIX install architecture-independent files in PREFIX [$ac_default_prefix] --exec-prefix=EPREFIX install architecture-dependent files in EPREFIX [PREFIX] By default, \`make install' will install all the files in \`$ac_default_prefix/bin', \`$ac_default_prefix/lib' etc. You can specify an installation prefix other than \`$ac_default_prefix' using \`--prefix', for instance \`--prefix=\$HOME'. For better control, use the options below. Fine tuning of the installation directories: --bindir=DIR user executables [EPREFIX/bin] --sbindir=DIR system admin executables [EPREFIX/sbin] --libexecdir=DIR program executables [EPREFIX/libexec] --datadir=DIR read-only architecture-independent data [PREFIX/share] --sysconfdir=DIR read-only single-machine data [PREFIX/etc] --sharedstatedir=DIR modifiable architecture-independent data [PREFIX/com] --localstatedir=DIR modifiable single-machine data [PREFIX/var] --libdir=DIR object code libraries [EPREFIX/lib] --includedir=DIR C header files [PREFIX/include] --oldincludedir=DIR C header files for non-gcc [/usr/include] --infodir=DIR info documentation [PREFIX/info] --mandir=DIR man documentation [PREFIX/man] _ACEOF cat <<\_ACEOF Program names: --program-prefix=PREFIX prepend PREFIX to installed program names --program-suffix=SUFFIX append SUFFIX to installed program names --program-transform-name=PROGRAM run sed PROGRAM on installed program names _ACEOF fi if test -n "$ac_init_help"; then case $ac_init_help in short | recursive ) echo "Configuration of syck 0.54:";; esac cat <<\_ACEOF Optional Features: --disable-FEATURE do not include FEATURE (same as --enable-FEATURE=no) --enable-FEATURE[=ARG] include FEATURE [ARG=yes] --disable-dependency-tracking speeds up one-time build --enable-dependency-tracking do not reject slow dependency extractors Some influential environment variables: CC C compiler command CFLAGS C compiler flags LDFLAGS linker flags, e.g. -L if you have libraries in a nonstandard directory CPPFLAGS C/C++ preprocessor flags, e.g. -I if you have headers in a nonstandard directory CPP C preprocessor Use these variables to override the choices made by `configure' or to help it to find libraries and programs with nonstandard names/locations. _ACEOF fi if test "$ac_init_help" = "recursive"; then # If there are subdirs, report their specific --help. ac_popdir=`pwd` for ac_dir in : $ac_subdirs_all; do test "x$ac_dir" = x: && continue test -d $ac_dir || continue ac_builddir=. if test "$ac_dir" != .; then ac_dir_suffix=/`echo "$ac_dir" | sed 's,^\.[\\/],,'` # A "../" for each directory in $ac_dir_suffix. ac_top_builddir=`echo "$ac_dir_suffix" | sed 's,/[^\\/]*,../,g'` else ac_dir_suffix= ac_top_builddir= fi case $srcdir in .) # No --srcdir option. We are building in place. ac_srcdir=. if test -z "$ac_top_builddir"; then ac_top_srcdir=. else ac_top_srcdir=`echo $ac_top_builddir | sed 's,/$,,'` fi ;; [\\/]* | ?:[\\/]* ) # Absolute path. 
ac_srcdir=$srcdir$ac_dir_suffix; ac_top_srcdir=$srcdir ;; *) # Relative path. ac_srcdir=$ac_top_builddir$srcdir$ac_dir_suffix ac_top_srcdir=$ac_top_builddir$srcdir ;; esac # Do not use `cd foo && pwd` to compute absolute paths, because # the directories may not exist. case `pwd` in .) ac_abs_builddir="$ac_dir";; *) case "$ac_dir" in .) ac_abs_builddir=`pwd`;; [\\/]* | ?:[\\/]* ) ac_abs_builddir="$ac_dir";; *) ac_abs_builddir=`pwd`/"$ac_dir";; esac;; esac case $ac_abs_builddir in .) ac_abs_top_builddir=${ac_top_builddir}.;; *) case ${ac_top_builddir}. in .) ac_abs_top_builddir=$ac_abs_builddir;; [\\/]* | ?:[\\/]* ) ac_abs_top_builddir=${ac_top_builddir}.;; *) ac_abs_top_builddir=$ac_abs_builddir/${ac_top_builddir}.;; esac;; esac case $ac_abs_builddir in .) ac_abs_srcdir=$ac_srcdir;; *) case $ac_srcdir in .) ac_abs_srcdir=$ac_abs_builddir;; [\\/]* | ?:[\\/]* ) ac_abs_srcdir=$ac_srcdir;; *) ac_abs_srcdir=$ac_abs_builddir/$ac_srcdir;; esac;; esac case $ac_abs_builddir in .) ac_abs_top_srcdir=$ac_top_srcdir;; *) case $ac_top_srcdir in .) ac_abs_top_srcdir=$ac_abs_builddir;; [\\/]* | ?:[\\/]* ) ac_abs_top_srcdir=$ac_top_srcdir;; *) ac_abs_top_srcdir=$ac_abs_builddir/$ac_top_srcdir;; esac;; esac cd $ac_dir # Check for guested configure; otherwise get Cygnus style configure. if test -f $ac_srcdir/configure.gnu; then echo $SHELL $ac_srcdir/configure.gnu --help=recursive elif test -f $ac_srcdir/configure; then echo $SHELL $ac_srcdir/configure --help=recursive elif test -f $ac_srcdir/configure.ac || test -f $ac_srcdir/configure.in; then echo $ac_configure --help else echo "$as_me: WARNING: no configuration information is in $ac_dir" >&2 fi cd $ac_popdir done fi test -n "$ac_init_help" && exit 0 if $ac_init_version; then cat <<\_ACEOF syck configure 0.54 generated by GNU Autoconf 2.59 Copyright (C) 2003 Free Software Foundation, Inc. This configure script is free software; the Free Software Foundation gives unlimited permission to copy, distribute and modify it. _ACEOF exit 0 fi exec 5>config.log cat >&5 <<_ACEOF This file contains any messages produced by compilers while running configure, to aid debugging if configure makes a mistake. It was created by syck $as_me 0.54, which was generated by GNU Autoconf 2.59. Invocation command line was $ $0 $@ _ACEOF { cat <<_ASUNAME ## --------- ## ## Platform. ## ## --------- ## hostname = `(hostname || uname -n) 2>/dev/null | sed 1q` uname -m = `(uname -m) 2>/dev/null || echo unknown` uname -r = `(uname -r) 2>/dev/null || echo unknown` uname -s = `(uname -s) 2>/dev/null || echo unknown` uname -v = `(uname -v) 2>/dev/null || echo unknown` /usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null || echo unknown` /bin/uname -X = `(/bin/uname -X) 2>/dev/null || echo unknown` /bin/arch = `(/bin/arch) 2>/dev/null || echo unknown` /usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null || echo unknown` /usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null || echo unknown` hostinfo = `(hostinfo) 2>/dev/null || echo unknown` /bin/machine = `(/bin/machine) 2>/dev/null || echo unknown` /usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null || echo unknown` /bin/universe = `(/bin/universe) 2>/dev/null || echo unknown` _ASUNAME as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. echo "PATH: $as_dir" done } >&5 cat >&5 <<_ACEOF ## ----------- ## ## Core tests. ## ## ----------- ## _ACEOF # Keep a trace of the command line. # Strip out --no-create and --no-recursion so they do not pile up. 
# Strip out --silent because we don't want to record it for future runs. # Also quote any args containing shell meta-characters. # Make two passes to allow for proper duplicate-argument suppression. ac_configure_args= ac_configure_args0= ac_configure_args1= ac_sep= ac_must_keep_next=false for ac_pass in 1 2 do for ac_arg do case $ac_arg in -no-create | --no-c* | -n | -no-recursion | --no-r*) continue ;; -q | -quiet | --quiet | --quie | --qui | --qu | --q \ | -silent | --silent | --silen | --sile | --sil) continue ;; *" "*|*" "*|*[\[\]\~\#\$\^\&\*\(\)\{\}\\\|\;\<\>\?\"\']*) ac_arg=`echo "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;; esac case $ac_pass in 1) ac_configure_args0="$ac_configure_args0 '$ac_arg'" ;; 2) ac_configure_args1="$ac_configure_args1 '$ac_arg'" if test $ac_must_keep_next = true; then ac_must_keep_next=false # Got value, back to normal. else case $ac_arg in *=* | --config-cache | -C | -disable-* | --disable-* \ | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \ | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \ | -with-* | --with-* | -without-* | --without-* | --x) case "$ac_configure_args0 " in "$ac_configure_args1"*" '$ac_arg' "* ) continue ;; esac ;; -* ) ac_must_keep_next=true ;; esac fi ac_configure_args="$ac_configure_args$ac_sep'$ac_arg'" # Get rid of the leading space. ac_sep=" " ;; esac done done $as_unset ac_configure_args0 || test "${ac_configure_args0+set}" != set || { ac_configure_args0=; export ac_configure_args0; } $as_unset ac_configure_args1 || test "${ac_configure_args1+set}" != set || { ac_configure_args1=; export ac_configure_args1; } # When interrupted or exit'd, cleanup temporary files, and complete # config.log. We remove comments because anyway the quotes in there # would cause problems or look ugly. # WARNING: Be sure not to use single quotes in there, as some shells, # such as our DU 5.0 friend, will then `close' the trap. trap 'exit_status=$? # Save into config.log some information that might help in debugging. { echo cat <<\_ASBOX ## ---------------- ## ## Cache variables. ## ## ---------------- ## _ASBOX echo # The following way of writing the cache mishandles newlines in values, { (set) 2>&1 | case `(ac_space='"'"' '"'"'; set | grep ac_space) 2>&1` in *ac_space=\ *) sed -n \ "s/'"'"'/'"'"'\\\\'"'"''"'"'/g; s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='"'"'\\2'"'"'/p" ;; *) sed -n \ "s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1=\\2/p" ;; esac; } echo cat <<\_ASBOX ## ----------------- ## ## Output variables. ## ## ----------------- ## _ASBOX echo for ac_var in $ac_subst_vars do eval ac_val=$`echo $ac_var` echo "$ac_var='"'"'$ac_val'"'"'" done | sort echo if test -n "$ac_subst_files"; then cat <<\_ASBOX ## ------------- ## ## Output files. ## ## ------------- ## _ASBOX echo for ac_var in $ac_subst_files do eval ac_val=$`echo $ac_var` echo "$ac_var='"'"'$ac_val'"'"'" done | sort echo fi if test -s confdefs.h; then cat <<\_ASBOX ## ----------- ## ## confdefs.h. ## ## ----------- ## _ASBOX echo sed "/^$/d" confdefs.h | sort echo fi test "$ac_signal" != 0 && echo "$as_me: caught signal $ac_signal" echo "$as_me: exit $exit_status" } >&5 rm -f core *.core && rm -rf conftest* confdefs* conf$$* $ac_clean_files && exit $exit_status ' 0 for ac_signal in 1 2 13 15; do trap 'ac_signal='$ac_signal'; { (exit 1); exit 1; }' $ac_signal done ac_signal=0 # confdefs.h avoids OS command line length limits that DEFS can exceed. rm -rf conftest* confdefs.h # AIX cpp loses on an empty file, so make sure it contains at least a newline. 
echo >confdefs.h # Predefined preprocessor variables. cat >>confdefs.h <<_ACEOF #define PACKAGE_NAME "$PACKAGE_NAME" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_TARNAME "$PACKAGE_TARNAME" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_VERSION "$PACKAGE_VERSION" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_STRING "$PACKAGE_STRING" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_BUGREPORT "$PACKAGE_BUGREPORT" _ACEOF # Let the site file select an alternate cache file if it wants to. # Prefer explicitly selected file to automatically selected ones. if test -z "$CONFIG_SITE"; then if test "x$prefix" != xNONE; then CONFIG_SITE="$prefix/share/config.site $prefix/etc/config.site" else CONFIG_SITE="$ac_default_prefix/share/config.site $ac_default_prefix/etc/config.site" fi fi for ac_site_file in $CONFIG_SITE; do if test -r "$ac_site_file"; then { echo "$as_me:$LINENO: loading site script $ac_site_file" >&5 echo "$as_me: loading site script $ac_site_file" >&6;} sed 's/^/| /' "$ac_site_file" >&5 . "$ac_site_file" fi done if test -r "$cache_file"; then # Some versions of bash will fail to source /dev/null (special # files actually), so we avoid doing that. if test -f "$cache_file"; then { echo "$as_me:$LINENO: loading cache $cache_file" >&5 echo "$as_me: loading cache $cache_file" >&6;} case $cache_file in [\\/]* | ?:[\\/]* ) . $cache_file;; *) . ./$cache_file;; esac fi else { echo "$as_me:$LINENO: creating cache $cache_file" >&5 echo "$as_me: creating cache $cache_file" >&6;} >$cache_file fi # Check that the precious variables saved in the cache have kept the same # value. ac_cache_corrupted=false for ac_var in `(set) 2>&1 | sed -n 's/^ac_env_\([a-zA-Z_0-9]*\)_set=.*/\1/p'`; do eval ac_old_set=\$ac_cv_env_${ac_var}_set eval ac_new_set=\$ac_env_${ac_var}_set eval ac_old_val="\$ac_cv_env_${ac_var}_value" eval ac_new_val="\$ac_env_${ac_var}_value" case $ac_old_set,$ac_new_set in set,) { echo "$as_me:$LINENO: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&5 echo "$as_me: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&2;} ac_cache_corrupted=: ;; ,set) { echo "$as_me:$LINENO: error: \`$ac_var' was not set in the previous run" >&5 echo "$as_me: error: \`$ac_var' was not set in the previous run" >&2;} ac_cache_corrupted=: ;; ,);; *) if test "x$ac_old_val" != "x$ac_new_val"; then { echo "$as_me:$LINENO: error: \`$ac_var' has changed since the previous run:" >&5 echo "$as_me: error: \`$ac_var' has changed since the previous run:" >&2;} { echo "$as_me:$LINENO: former value: $ac_old_val" >&5 echo "$as_me: former value: $ac_old_val" >&2;} { echo "$as_me:$LINENO: current value: $ac_new_val" >&5 echo "$as_me: current value: $ac_new_val" >&2;} ac_cache_corrupted=: fi;; esac # Pass precious variables to config.status. if test "$ac_new_set" = set; then case $ac_new_val in *" "*|*" "*|*[\[\]\~\#\$\^\&\*\(\)\{\}\\\|\;\<\>\?\"\']*) ac_arg=$ac_var=`echo "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;; *) ac_arg=$ac_var=$ac_new_val ;; esac case " $ac_configure_args " in *" '$ac_arg' "*) ;; # Avoid dups. Use of quotes ensures accuracy. 
*) ac_configure_args="$ac_configure_args '$ac_arg'" ;; esac fi done if $ac_cache_corrupted; then { echo "$as_me:$LINENO: error: changes in the environment can compromise the build" >&5 echo "$as_me: error: changes in the environment can compromise the build" >&2;} { { echo "$as_me:$LINENO: error: run \`make distclean' and/or \`rm $cache_file' and start over" >&5 echo "$as_me: error: run \`make distclean' and/or \`rm $cache_file' and start over" >&2;} { (exit 1); exit 1; }; } fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu ac_aux_dir= for ac_dir in config $srcdir/config; do if test -f $ac_dir/install-sh; then ac_aux_dir=$ac_dir ac_install_sh="$ac_aux_dir/install-sh -c" break elif test -f $ac_dir/install.sh; then ac_aux_dir=$ac_dir ac_install_sh="$ac_aux_dir/install.sh -c" break elif test -f $ac_dir/shtool; then ac_aux_dir=$ac_dir ac_install_sh="$ac_aux_dir/shtool install -c" break fi done if test -z "$ac_aux_dir"; then { { echo "$as_me:$LINENO: error: cannot find install-sh or install.sh in config $srcdir/config" >&5 echo "$as_me: error: cannot find install-sh or install.sh in config $srcdir/config" >&2;} { (exit 1); exit 1; }; } fi ac_config_guess="$SHELL $ac_aux_dir/config.guess" ac_config_sub="$SHELL $ac_aux_dir/config.sub" ac_configure="$SHELL $ac_aux_dir/configure" # This should be Cygnus configure. am__api_version="1.9" # Find a good install program. We prefer a C program (faster), # so one script is as good as another. But avoid the broken or # incompatible versions: # SysV /etc/install, /usr/sbin/install # SunOS /usr/etc/install # IRIX /sbin/install # AIX /bin/install # AmigaOS /C/install, which installs bootblocks on floppy discs # AIX 4 /usr/bin/installbsd, which doesn't work without a -g flag # AFS /usr/afsws/bin/install, which mishandles nonexistent args # SVR4 /usr/ucb/install, which tries to use the nonexistent group "staff" # OS/2's system install, which has a completely different semantic # ./install, which can be erroneously created by make from ./install.sh. echo "$as_me:$LINENO: checking for a BSD-compatible install" >&5 echo $ECHO_N "checking for a BSD-compatible install... $ECHO_C" >&6 if test -z "$INSTALL"; then if test "${ac_cv_path_install+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. # Account for people who put trailing slashes in PATH elements. case $as_dir/ in ./ | .// | /cC/* | \ /etc/* | /usr/sbin/* | /usr/etc/* | /sbin/* | /usr/afsws/bin/* | \ ?:\\/os2\\/install\\/* | ?:\\/OS2\\/INSTALL\\/* | \ /usr/ucb/* ) ;; *) # OSF1 and SCO ODT 3.0 have their own names for install. # Don't use installbsd from OSF since it installs stuff as root # by default. for ac_prog in ginstall scoinst install; do for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$ac_prog$ac_exec_ext"; then if test $ac_prog = install && grep dspmsg "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then # AIX install. It has an incompatible calling convention. : elif test $ac_prog = install && grep pwplus "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then # program-specific install script used by HP pwplus--don't use. 
: else ac_cv_path_install="$as_dir/$ac_prog$ac_exec_ext -c" break 3 fi fi done done ;; esac done fi if test "${ac_cv_path_install+set}" = set; then INSTALL=$ac_cv_path_install else # As a last resort, use the slow shell script. We don't cache a # path for INSTALL within a source directory, because that will # break other packages using the cache if that directory is # removed, or if the path is relative. INSTALL=$ac_install_sh fi fi echo "$as_me:$LINENO: result: $INSTALL" >&5 echo "${ECHO_T}$INSTALL" >&6 # Use test -z because SunOS4 sh mishandles braces in ${var-val}. # It thinks the first close brace ends the variable substitution. test -z "$INSTALL_PROGRAM" && INSTALL_PROGRAM='${INSTALL}' test -z "$INSTALL_SCRIPT" && INSTALL_SCRIPT='${INSTALL}' test -z "$INSTALL_DATA" && INSTALL_DATA='${INSTALL} -m 644' echo "$as_me:$LINENO: checking whether build environment is sane" >&5 echo $ECHO_N "checking whether build environment is sane... $ECHO_C" >&6 # Just in case sleep 1 echo timestamp > conftest.file # Do `set' in a subshell so we don't clobber the current shell's # arguments. Must try -L first in case configure is actually a # symlink; some systems play weird games with the mod time of symlinks # (eg FreeBSD returns the mod time of the symlink's containing # directory). if ( set X `ls -Lt $srcdir/configure conftest.file 2> /dev/null` if test "$*" = "X"; then # -L didn't work. set X `ls -t $srcdir/configure conftest.file` fi rm -f conftest.file if test "$*" != "X $srcdir/configure conftest.file" \ && test "$*" != "X conftest.file $srcdir/configure"; then # If neither matched, then we have a broken ls. This can happen # if, for instance, CONFIG_SHELL is bash and it inherits a # broken ls alias from the environment. This has actually # happened. Such a system could not be considered "sane". { { echo "$as_me:$LINENO: error: ls -t appears to fail. Make sure there is not a broken alias in your environment" >&5 echo "$as_me: error: ls -t appears to fail. Make sure there is not a broken alias in your environment" >&2;} { (exit 1); exit 1; }; } fi test "$2" = conftest.file ) then # Ok. : else { { echo "$as_me:$LINENO: error: newly created file is older than distributed files! Check your system clock" >&5 echo "$as_me: error: newly created file is older than distributed files! Check your system clock" >&2;} { (exit 1); exit 1; }; } fi echo "$as_me:$LINENO: result: yes" >&5 echo "${ECHO_T}yes" >&6 test "$program_prefix" != NONE && program_transform_name="s,^,$program_prefix,;$program_transform_name" # Use a double $ so make ignores it. test "$program_suffix" != NONE && program_transform_name="s,\$,$program_suffix,;$program_transform_name" # Double any \ or $. echo might interpret backslashes. # By default was `s,x,x', remove it if useless. cat <<\_ACEOF >conftest.sed s/[\\$]/&&/g;s/;s,x,x,$// _ACEOF program_transform_name=`echo $program_transform_name | sed -f conftest.sed` rm conftest.sed # expand $ac_aux_dir to an absolute path am_aux_dir=`cd $ac_aux_dir && pwd` test x"${MISSING+set}" = xset || MISSING="\${SHELL} $am_aux_dir/missing" # Use eval to expand $SHELL if eval "$MISSING --run true"; then am_missing_run="$MISSING --run " else am_missing_run= { echo "$as_me:$LINENO: WARNING: \`missing' script is too old or missing" >&5 echo "$as_me: WARNING: \`missing' script is too old or missing" >&2;} fi if mkdir -p --version . >/dev/null 2>&1 && test ! -d ./--version; then # We used to keeping the `.' as first argument, in order to # allow $(mkdir_p) to be used without argument. 
As in # $(mkdir_p) $(somedir) # where $(somedir) is conditionally defined. However this is wrong # for two reasons: # 1. if the package is installed by a user who cannot write `.' # make install will fail, # 2. the above comment should most certainly read # $(mkdir_p) $(DESTDIR)$(somedir) # so it does not work when $(somedir) is undefined and # $(DESTDIR) is not. # To support the latter case, we have to write # test -z "$(somedir)" || $(mkdir_p) $(DESTDIR)$(somedir), # so the `.' trick is pointless. mkdir_p='mkdir -p --' else # On NextStep and OpenStep, the `mkdir' command does not # recognize any option. It will interpret all options as # directories to create, and then abort because `.' already # exists. for d in ./-p ./--version; do test -d $d && rmdir $d done # $(mkinstalldirs) is defined by Automake if mkinstalldirs exists. if test -f "$ac_aux_dir/mkinstalldirs"; then mkdir_p='$(mkinstalldirs)' else mkdir_p='$(install_sh) -d' fi fi for ac_prog in gawk mawk nawk awk do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 echo "$as_me:$LINENO: checking for $ac_word" >&5 echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6 if test "${ac_cv_prog_AWK+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test -n "$AWK"; then ac_cv_prog_AWK="$AWK" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_AWK="$ac_prog" echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done fi fi AWK=$ac_cv_prog_AWK if test -n "$AWK"; then echo "$as_me:$LINENO: result: $AWK" >&5 echo "${ECHO_T}$AWK" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi test -n "$AWK" && break done echo "$as_me:$LINENO: checking whether ${MAKE-make} sets \$(MAKE)" >&5 echo $ECHO_N "checking whether ${MAKE-make} sets \$(MAKE)... $ECHO_C" >&6 set dummy ${MAKE-make}; ac_make=`echo "$2" | sed 'y,:./+-,___p_,'` if eval "test \"\${ac_cv_prog_make_${ac_make}_set+set}\" = set"; then echo $ECHO_N "(cached) $ECHO_C" >&6 else cat >conftest.make <<\_ACEOF all: @echo 'ac_maketemp="$(MAKE)"' _ACEOF # GNU make sometimes prints "make[1]: Entering...", which would confuse us. eval `${MAKE-make} -f conftest.make 2>/dev/null | grep temp=` if test -n "$ac_maketemp"; then eval ac_cv_prog_make_${ac_make}_set=yes else eval ac_cv_prog_make_${ac_make}_set=no fi rm -f conftest.make fi if eval "test \"`echo '$ac_cv_prog_make_'${ac_make}_set`\" = yes"; then echo "$as_me:$LINENO: result: yes" >&5 echo "${ECHO_T}yes" >&6 SET_MAKE= else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 SET_MAKE="MAKE=${MAKE-make}" fi rm -rf .tst 2>/dev/null mkdir .tst 2>/dev/null if test -d .tst; then am__leading_dot=. else am__leading_dot=_ fi rmdir .tst 2>/dev/null # test to see if srcdir already configured if test "`cd $srcdir && pwd`" != "`pwd`" && test -f $srcdir/config.status; then { { echo "$as_me:$LINENO: error: source directory already configured; run \"make distclean\" there first" >&5 echo "$as_me: error: source directory already configured; run \"make distclean\" there first" >&2;} { (exit 1); exit 1; }; } fi # test whether we have cygpath if test -z "$CYGPATH_W"; then if (cygpath --version) >/dev/null 2>/dev/null; then CYGPATH_W='cygpath -w' else CYGPATH_W=echo fi fi # Define the identity of the package. 
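# PACKAGE and VERSION set below end up both as C macros (via confdefs.h
# and the generated config.h) and as substitutions in the generated
# Makefiles; for instance the distribution directory and tarball name
# are derived from them, roughly
#   $(PACKAGE)-$(VERSION)  ->  syck-0.54
# (illustrative only; the actual dist rules live in the Makefiles).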
PACKAGE=syck VERSION=0.54 cat >>confdefs.h <<_ACEOF #define PACKAGE "$PACKAGE" _ACEOF cat >>confdefs.h <<_ACEOF #define VERSION "$VERSION" _ACEOF # Some tools Automake needs. ACLOCAL=${ACLOCAL-"${am_missing_run}aclocal-${am__api_version}"} AUTOCONF=${AUTOCONF-"${am_missing_run}autoconf"} AUTOMAKE=${AUTOMAKE-"${am_missing_run}automake-${am__api_version}"} AUTOHEADER=${AUTOHEADER-"${am_missing_run}autoheader"} MAKEINFO=${MAKEINFO-"${am_missing_run}makeinfo"} install_sh=${install_sh-"$am_aux_dir/install-sh"} # Installed binaries are usually stripped using `strip' when the user # run `make install-strip'. However `strip' might not be the right # tool to use in cross-compilation environments, therefore Automake # will honor the `STRIP' environment variable to overrule this program. if test "$cross_compiling" != no; then if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args. set dummy ${ac_tool_prefix}strip; ac_word=$2 echo "$as_me:$LINENO: checking for $ac_word" >&5 echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6 if test "${ac_cv_prog_STRIP+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test -n "$STRIP"; then ac_cv_prog_STRIP="$STRIP" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_STRIP="${ac_tool_prefix}strip" echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done fi fi STRIP=$ac_cv_prog_STRIP if test -n "$STRIP"; then echo "$as_me:$LINENO: result: $STRIP" >&5 echo "${ECHO_T}$STRIP" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi fi if test -z "$ac_cv_prog_STRIP"; then ac_ct_STRIP=$STRIP # Extract the first word of "strip", so it can be a program name with args. set dummy strip; ac_word=$2 echo "$as_me:$LINENO: checking for $ac_word" >&5 echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6 if test "${ac_cv_prog_ac_ct_STRIP+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test -n "$ac_ct_STRIP"; then ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_STRIP="strip" echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done test -z "$ac_cv_prog_ac_ct_STRIP" && ac_cv_prog_ac_ct_STRIP=":" fi fi ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP if test -n "$ac_ct_STRIP"; then echo "$as_me:$LINENO: result: $ac_ct_STRIP" >&5 echo "${ECHO_T}$ac_ct_STRIP" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi STRIP=$ac_ct_STRIP else STRIP="$ac_cv_prog_STRIP" fi fi INSTALL_STRIP_PROGRAM="\${SHELL} \$(install_sh) -c -s" # We need awk for the "check" target. The system "awk" is bad on # some platforms. # Always define AMTAR for backward compatibility. AMTAR=${AMTAR-"${am_missing_run}tar"} am__tar='${AMTAR} chof - "$$tardir"'; am__untar='${AMTAR} xf -' ac_config_headers="$ac_config_headers config.h" # Checks for programs. # Find a good install program. We prefer a C program (faster), # so one script is as good as another. 
But avoid the broken or # incompatible versions: # SysV /etc/install, /usr/sbin/install # SunOS /usr/etc/install # IRIX /sbin/install # AIX /bin/install # AmigaOS /C/install, which installs bootblocks on floppy discs # AIX 4 /usr/bin/installbsd, which doesn't work without a -g flag # AFS /usr/afsws/bin/install, which mishandles nonexistent args # SVR4 /usr/ucb/install, which tries to use the nonexistent group "staff" # OS/2's system install, which has a completely different semantic # ./install, which can be erroneously created by make from ./install.sh. echo "$as_me:$LINENO: checking for a BSD-compatible install" >&5 echo $ECHO_N "checking for a BSD-compatible install... $ECHO_C" >&6 if test -z "$INSTALL"; then if test "${ac_cv_path_install+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. # Account for people who put trailing slashes in PATH elements. case $as_dir/ in ./ | .// | /cC/* | \ /etc/* | /usr/sbin/* | /usr/etc/* | /sbin/* | /usr/afsws/bin/* | \ ?:\\/os2\\/install\\/* | ?:\\/OS2\\/INSTALL\\/* | \ /usr/ucb/* ) ;; *) # OSF1 and SCO ODT 3.0 have their own names for install. # Don't use installbsd from OSF since it installs stuff as root # by default. for ac_prog in ginstall scoinst install; do for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$ac_prog$ac_exec_ext"; then if test $ac_prog = install && grep dspmsg "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then # AIX install. It has an incompatible calling convention. : elif test $ac_prog = install && grep pwplus "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then # program-specific install script used by HP pwplus--don't use. : else ac_cv_path_install="$as_dir/$ac_prog$ac_exec_ext -c" break 3 fi fi done done ;; esac done fi if test "${ac_cv_path_install+set}" = set; then INSTALL=$ac_cv_path_install else # As a last resort, use the slow shell script. We don't cache a # path for INSTALL within a source directory, because that will # break other packages using the cache if that directory is # removed, or if the path is relative. INSTALL=$ac_install_sh fi fi echo "$as_me:$LINENO: result: $INSTALL" >&5 echo "${ECHO_T}$INSTALL" >&6 # Use test -z because SunOS4 sh mishandles braces in ${var-val}. # It thinks the first close brace ends the variable substitution. test -z "$INSTALL_PROGRAM" && INSTALL_PROGRAM='${INSTALL}' test -z "$INSTALL_SCRIPT" && INSTALL_SCRIPT='${INSTALL}' test -z "$INSTALL_DATA" && INSTALL_DATA='${INSTALL} -m 644' echo "$as_me:$LINENO: checking whether ln -s works" >&5 echo $ECHO_N "checking whether ln -s works... $ECHO_C" >&6 LN_S=$as_ln_s if test "$LN_S" = "ln -s"; then echo "$as_me:$LINENO: result: yes" >&5 echo "${ECHO_T}yes" >&6 else echo "$as_me:$LINENO: result: no, using $LN_S" >&5 echo "${ECHO_T}no, using $LN_S" >&6 fi if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}ranlib", so it can be a program name with args. set dummy ${ac_tool_prefix}ranlib; ac_word=$2 echo "$as_me:$LINENO: checking for $ac_word" >&5 echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6 if test "${ac_cv_prog_RANLIB+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test -n "$RANLIB"; then ac_cv_prog_RANLIB="$RANLIB" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_RANLIB="${ac_tool_prefix}ranlib" echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done fi fi RANLIB=$ac_cv_prog_RANLIB if test -n "$RANLIB"; then echo "$as_me:$LINENO: result: $RANLIB" >&5 echo "${ECHO_T}$RANLIB" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi fi if test -z "$ac_cv_prog_RANLIB"; then ac_ct_RANLIB=$RANLIB # Extract the first word of "ranlib", so it can be a program name with args. set dummy ranlib; ac_word=$2 echo "$as_me:$LINENO: checking for $ac_word" >&5 echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6 if test "${ac_cv_prog_ac_ct_RANLIB+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test -n "$ac_ct_RANLIB"; then ac_cv_prog_ac_ct_RANLIB="$ac_ct_RANLIB" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_RANLIB="ranlib" echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done test -z "$ac_cv_prog_ac_ct_RANLIB" && ac_cv_prog_ac_ct_RANLIB=":" fi fi ac_ct_RANLIB=$ac_cv_prog_ac_ct_RANLIB if test -n "$ac_ct_RANLIB"; then echo "$as_me:$LINENO: result: $ac_ct_RANLIB" >&5 echo "${ECHO_T}$ac_ct_RANLIB" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi RANLIB=$ac_ct_RANLIB else RANLIB="$ac_cv_prog_RANLIB" fi echo "$as_me:$LINENO: checking whether ${MAKE-make} sets \$(MAKE)" >&5 echo $ECHO_N "checking whether ${MAKE-make} sets \$(MAKE)... $ECHO_C" >&6 set dummy ${MAKE-make}; ac_make=`echo "$2" | sed 'y,:./+-,___p_,'` if eval "test \"\${ac_cv_prog_make_${ac_make}_set+set}\" = set"; then echo $ECHO_N "(cached) $ECHO_C" >&6 else cat >conftest.make <<\_ACEOF all: @echo 'ac_maketemp="$(MAKE)"' _ACEOF # GNU make sometimes prints "make[1]: Entering...", which would confuse us. eval `${MAKE-make} -f conftest.make 2>/dev/null | grep temp=` if test -n "$ac_maketemp"; then eval ac_cv_prog_make_${ac_make}_set=yes else eval ac_cv_prog_make_${ac_make}_set=no fi rm -f conftest.make fi if eval "test \"`echo '$ac_cv_prog_make_'${ac_make}_set`\" = yes"; then echo "$as_me:$LINENO: result: yes" >&5 echo "${ECHO_T}yes" >&6 SET_MAKE= else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 SET_MAKE="MAKE=${MAKE-make}" fi for ac_prog in gawk mawk nawk awk do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 echo "$as_me:$LINENO: checking for $ac_word" >&5 echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6 if test "${ac_cv_prog_AWK+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test -n "$AWK"; then ac_cv_prog_AWK="$AWK" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_AWK="$ac_prog" echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done fi fi AWK=$ac_cv_prog_AWK if test -n "$AWK"; then echo "$as_me:$LINENO: result: $AWK" >&5 echo "${ECHO_T}$AWK" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi test -n "$AWK" && break done for ac_prog in 'bison -y' byacc do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 echo "$as_me:$LINENO: checking for $ac_word" >&5 echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6 if test "${ac_cv_prog_YACC+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test -n "$YACC"; then ac_cv_prog_YACC="$YACC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_YACC="$ac_prog" echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done fi fi YACC=$ac_cv_prog_YACC if test -n "$YACC"; then echo "$as_me:$LINENO: result: $YACC" >&5 echo "${ECHO_T}$YACC" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi test -n "$YACC" && break done test -n "$YACC" || YACC="yacc" DEPDIR="${am__leading_dot}deps" ac_config_commands="$ac_config_commands depfiles" am_make=${MAKE-make} cat > confinc << 'END' am__doit: @echo done .PHONY: am__doit END # If we don't find an include directive, just comment out the code. echo "$as_me:$LINENO: checking for style of include used by $am_make" >&5 echo $ECHO_N "checking for style of include used by $am_make... $ECHO_C" >&6 am__include="#" am__quote= _am_result=none # First try GNU make style include. echo "include confinc" > confmf # We grep out `Entering directory' and `Leaving directory' # messages which can occur if `w' ends up in MAKEFLAGS. # In particular we don't look at `^make:' because GNU make might # be invoked under some other name (usually "gmake"), in which # case it prints its new name instead of `make'. if test "`$am_make -s -f confmf 2> /dev/null | grep -v 'ing directory'`" = "done"; then am__include=include am__quote= _am_result=GNU fi # Now try BSD make style include. if test "$am__include" = "#"; then echo '.include "confinc"' > confmf if test "`$am_make -s -f confmf 2> /dev/null`" = "done"; then am__include=.include am__quote="\"" _am_result=BSD fi fi echo "$as_me:$LINENO: result: $_am_result" >&5 echo "${ECHO_T}$_am_result" >&6 rm -f confinc confmf # Check whether --enable-dependency-tracking or --disable-dependency-tracking was given. if test "${enable_dependency_tracking+set}" = set; then enableval="$enable_dependency_tracking" fi; if test "x$enable_dependency_tracking" != xno; then am_depcomp="$ac_aux_dir/depcomp" AMDEPBACKSLASH='\' fi if test "x$enable_dependency_tracking" != xno; then AMDEP_TRUE= AMDEP_FALSE='#' else AMDEP_TRUE='#' AMDEP_FALSE= fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}gcc", so it can be a program name with args. 
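# The `set dummy ...; ac_word=$2' idiom used below takes the first word
# of a possibly multi-word program specification (the leading `dummy'
# keeps an empty or option-like value from confusing `set'), and only
# that word is searched for along $PATH.  With a cross prefix such as
# `i686-pc-mingw32-', for example, the word looked up would be
# `i686-pc-mingw32-gcc'.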
set dummy ${ac_tool_prefix}gcc; ac_word=$2 echo "$as_me:$LINENO: checking for $ac_word" >&5 echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6 if test "${ac_cv_prog_CC+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CC="${ac_tool_prefix}gcc" echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then echo "$as_me:$LINENO: result: $CC" >&5 echo "${ECHO_T}$CC" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi fi if test -z "$ac_cv_prog_CC"; then ac_ct_CC=$CC # Extract the first word of "gcc", so it can be a program name with args. set dummy gcc; ac_word=$2 echo "$as_me:$LINENO: checking for $ac_word" >&5 echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6 if test "${ac_cv_prog_ac_ct_CC+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test -n "$ac_ct_CC"; then ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_CC="gcc" echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done fi fi ac_ct_CC=$ac_cv_prog_ac_ct_CC if test -n "$ac_ct_CC"; then echo "$as_me:$LINENO: result: $ac_ct_CC" >&5 echo "${ECHO_T}$ac_ct_CC" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi CC=$ac_ct_CC else CC="$ac_cv_prog_CC" fi if test -z "$CC"; then if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}cc", so it can be a program name with args. set dummy ${ac_tool_prefix}cc; ac_word=$2 echo "$as_me:$LINENO: checking for $ac_word" >&5 echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6 if test "${ac_cv_prog_CC+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CC="${ac_tool_prefix}cc" echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then echo "$as_me:$LINENO: result: $CC" >&5 echo "${ECHO_T}$CC" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi fi if test -z "$ac_cv_prog_CC"; then ac_ct_CC=$CC # Extract the first word of "cc", so it can be a program name with args. set dummy cc; ac_word=$2 echo "$as_me:$LINENO: checking for $ac_word" >&5 echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6 if test "${ac_cv_prog_ac_ct_CC+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test -n "$ac_ct_CC"; then ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
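# $ac_executable_extensions is normally empty; on DOS-like systems it
# may list suffixes such as `.exe' so that, e.g., `gcc.exe' is found
# as well as plain `gcc'.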
for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_CC="cc" echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done fi fi ac_ct_CC=$ac_cv_prog_ac_ct_CC if test -n "$ac_ct_CC"; then echo "$as_me:$LINENO: result: $ac_ct_CC" >&5 echo "${ECHO_T}$ac_ct_CC" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi CC=$ac_ct_CC else CC="$ac_cv_prog_CC" fi fi if test -z "$CC"; then # Extract the first word of "cc", so it can be a program name with args. set dummy cc; ac_word=$2 echo "$as_me:$LINENO: checking for $ac_word" >&5 echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6 if test "${ac_cv_prog_CC+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else ac_prog_rejected=no as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$ac_word$ac_exec_ext"; then if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then ac_prog_rejected=yes continue fi ac_cv_prog_CC="cc" echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done if test $ac_prog_rejected = yes; then # We found a bogon in the path, so make sure we never use it. set dummy $ac_cv_prog_CC shift if test $# != 0; then # We chose a different compiler from the bogus one. # However, it has the same basename, so the bogon will be chosen # first if we set CC to just the basename; use the full file name. shift ac_cv_prog_CC="$as_dir/$ac_word${1+' '}$@" fi fi fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then echo "$as_me:$LINENO: result: $CC" >&5 echo "${ECHO_T}$CC" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi fi if test -z "$CC"; then if test -n "$ac_tool_prefix"; then for ac_prog in cl do # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. set dummy $ac_tool_prefix$ac_prog; ac_word=$2 echo "$as_me:$LINENO: checking for $ac_word" >&5 echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6 if test "${ac_cv_prog_CC+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CC="$ac_tool_prefix$ac_prog" echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then echo "$as_me:$LINENO: result: $CC" >&5 echo "${ECHO_T}$CC" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi test -n "$CC" && break done fi if test -z "$CC"; then ac_ct_CC=$CC for ac_prog in cl do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 echo "$as_me:$LINENO: checking for $ac_word" >&5 echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6 if test "${ac_cv_prog_ac_ct_CC+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test -n "$ac_ct_CC"; then ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_CC="$ac_prog" echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done fi fi ac_ct_CC=$ac_cv_prog_ac_ct_CC if test -n "$ac_ct_CC"; then echo "$as_me:$LINENO: result: $ac_ct_CC" >&5 echo "${ECHO_T}$ac_ct_CC" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi test -n "$ac_ct_CC" && break done CC=$ac_ct_CC fi fi test -z "$CC" && { { echo "$as_me:$LINENO: error: no acceptable C compiler found in \$PATH See \`config.log' for more details." >&5 echo "$as_me: error: no acceptable C compiler found in \$PATH See \`config.log' for more details." >&2;} { (exit 1); exit 1; }; } # Provide some information about the compiler. echo "$as_me:$LINENO:" \ "checking for C compiler version" >&5 ac_compiler=`set X $ac_compile; echo $2` { (eval echo "$as_me:$LINENO: \"$ac_compiler --version &5\"") >&5 (eval $ac_compiler --version &5) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } { (eval echo "$as_me:$LINENO: \"$ac_compiler -v &5\"") >&5 (eval $ac_compiler -v &5) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } { (eval echo "$as_me:$LINENO: \"$ac_compiler -V &5\"") >&5 (eval $ac_compiler -V &5) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ int main () { ; return 0; } _ACEOF ac_clean_files_save=$ac_clean_files ac_clean_files="$ac_clean_files a.out a.exe b.out" # Try to create an executable without -o first, disregard a.out. # It will help us diagnose broken compilers, and finding out an intuition # of exeext. echo "$as_me:$LINENO: checking for C compiler default output file name" >&5 echo $ECHO_N "checking for C compiler default output file name... $ECHO_C" >&6 ac_link_default=`echo "$ac_link" | sed 's/ -o *conftest[^ ]*//'` if { (eval echo "$as_me:$LINENO: \"$ac_link_default\"") >&5 (eval $ac_link_default) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; then # Find the output, starting from the most likely. This scheme is # not robust to junk in `.', hence go to wildcards (a.*) only as a last # resort. # Be careful to initialize this variable, since it used to be cached. # Otherwise an old cache value of `no' led to `EXEEXT = no' in a Makefile. ac_cv_exeext= # b.out is created by i960 compilers. for ac_file in a_out.exe a.exe conftest.exe a.out conftest a.* conftest.* b.out do test -f "$ac_file" || continue case $ac_file in *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.o | *.obj ) ;; conftest.$ac_ext ) # This is the source file. ;; [ab].out ) # We found the default executable, but exeext='' is most # certainly right. break;; *.* ) ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` # FIXME: I believe we export ac_cv_exeext for Libtool, # but it would be cool to find out if it's true. Does anybody # maintain Libtool? --akim. export ac_cv_exeext break;; * ) break;; esac done else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 { { echo "$as_me:$LINENO: error: C compiler cannot create executables See \`config.log' for more details." >&5 echo "$as_me: error: C compiler cannot create executables See \`config.log' for more details." 
>&2;} { (exit 77); exit 77; }; } fi ac_exeext=$ac_cv_exeext echo "$as_me:$LINENO: result: $ac_file" >&5 echo "${ECHO_T}$ac_file" >&6 # Check the compiler produces executables we can run. If not, either # the compiler is broken, or we cross compile. echo "$as_me:$LINENO: checking whether the C compiler works" >&5 echo $ECHO_N "checking whether the C compiler works... $ECHO_C" >&6 # FIXME: These cross compiler hacks should be removed for Autoconf 3.0 # If not cross compiling, check that we can run a simple program. if test "$cross_compiling" != yes; then if { ac_try='./$ac_file' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then cross_compiling=no else if test "$cross_compiling" = maybe; then cross_compiling=yes else { { echo "$as_me:$LINENO: error: cannot run C compiled programs. If you meant to cross compile, use \`--host'. See \`config.log' for more details." >&5 echo "$as_me: error: cannot run C compiled programs. If you meant to cross compile, use \`--host'. See \`config.log' for more details." >&2;} { (exit 1); exit 1; }; } fi fi fi echo "$as_me:$LINENO: result: yes" >&5 echo "${ECHO_T}yes" >&6 rm -f a.out a.exe conftest$ac_cv_exeext b.out ac_clean_files=$ac_clean_files_save # Check the compiler produces executables we can run. If not, either # the compiler is broken, or we cross compile. echo "$as_me:$LINENO: checking whether we are cross compiling" >&5 echo $ECHO_N "checking whether we are cross compiling... $ECHO_C" >&6 echo "$as_me:$LINENO: result: $cross_compiling" >&5 echo "${ECHO_T}$cross_compiling" >&6 echo "$as_me:$LINENO: checking for suffix of executables" >&5 echo $ECHO_N "checking for suffix of executables... $ECHO_C" >&6 if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; then # If both `conftest.exe' and `conftest' are `present' (well, observable) # catch `conftest.exe'. For instance with Cygwin, `ls conftest' will # work properly (i.e., refer to `conftest.exe'), while it won't with # `rm'. for ac_file in conftest.exe conftest conftest.*; do test -f "$ac_file" || continue case $ac_file in *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.o | *.obj ) ;; *.* ) ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` export ac_cv_exeext break;; * ) break;; esac done else { { echo "$as_me:$LINENO: error: cannot compute suffix of executables: cannot compile and link See \`config.log' for more details." >&5 echo "$as_me: error: cannot compute suffix of executables: cannot compile and link See \`config.log' for more details." >&2;} { (exit 1); exit 1; }; } fi rm -f conftest$ac_cv_exeext echo "$as_me:$LINENO: result: $ac_cv_exeext" >&5 echo "${ECHO_T}$ac_cv_exeext" >&6 rm -f conftest.$ac_ext EXEEXT=$ac_cv_exeext ac_exeext=$EXEEXT echo "$as_me:$LINENO: checking for suffix of object files" >&5 echo $ECHO_N "checking for suffix of object files... $ECHO_C" >&6 if test "${ac_cv_objext+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ int main () { ; return 0; } _ACEOF rm -f conftest.o conftest.obj if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 (exit $ac_status); }; then for ac_file in `(ls conftest.o conftest.obj; ls conftest.*) 2>/dev/null`; do case $ac_file in *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg ) ;; *) ac_cv_objext=`expr "$ac_file" : '.*\.\(.*\)'` break;; esac done else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 { { echo "$as_me:$LINENO: error: cannot compute suffix of object files: cannot compile See \`config.log' for more details." >&5 echo "$as_me: error: cannot compute suffix of object files: cannot compile See \`config.log' for more details." >&2;} { (exit 1); exit 1; }; } fi rm -f conftest.$ac_cv_objext conftest.$ac_ext fi echo "$as_me:$LINENO: result: $ac_cv_objext" >&5 echo "${ECHO_T}$ac_cv_objext" >&6 OBJEXT=$ac_cv_objext ac_objext=$OBJEXT echo "$as_me:$LINENO: checking whether we are using the GNU C compiler" >&5 echo $ECHO_N "checking whether we are using the GNU C compiler... $ECHO_C" >&6 if test "${ac_cv_c_compiler_gnu+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ int main () { #ifndef __GNUC__ choke me #endif ; return 0; } _ACEOF rm -f conftest.$ac_objext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest.$ac_objext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_compiler_gnu=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_compiler_gnu=no fi rm -f conftest.err conftest.$ac_objext conftest.$ac_ext ac_cv_c_compiler_gnu=$ac_compiler_gnu fi echo "$as_me:$LINENO: result: $ac_cv_c_compiler_gnu" >&5 echo "${ECHO_T}$ac_cv_c_compiler_gnu" >&6 GCC=`test $ac_compiler_gnu = yes && echo yes` ac_test_CFLAGS=${CFLAGS+set} ac_save_CFLAGS=$CFLAGS CFLAGS="-g" echo "$as_me:$LINENO: checking whether $CC accepts -g" >&5 echo $ECHO_N "checking whether $CC accepts -g... $ECHO_C" >&6 if test "${ac_cv_prog_cc_g+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ int main () { ; return 0; } _ACEOF rm -f conftest.$ac_objext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest.$ac_objext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_prog_cc_g=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_cv_prog_cc_g=no fi rm -f conftest.err conftest.$ac_objext conftest.$ac_ext fi echo "$as_me:$LINENO: result: $ac_cv_prog_cc_g" >&5 echo "${ECHO_T}$ac_cv_prog_cc_g" >&6 if test "$ac_test_CFLAGS" = set; then CFLAGS=$ac_save_CFLAGS elif test $ac_cv_prog_cc_g = yes; then if test "$GCC" = yes; then CFLAGS="-g -O2" else CFLAGS="-g" fi else if test "$GCC" = yes; then CFLAGS="-O2" else CFLAGS= fi fi echo "$as_me:$LINENO: checking for $CC option to accept ANSI C" >&5 echo $ECHO_N "checking for $CC option to accept ANSI C... $ECHO_C" >&6 if test "${ac_cv_prog_cc_stdc+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else ac_cv_prog_cc_stdc=no ac_save_CC=$CC cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ #include #include #include #include /* Most of the following tests are stolen from RCS 5.7's src/conf.sh. */ struct buf { int x; }; FILE * (*rcsopen) (struct buf *, struct stat *, int); static char *e (p, i) char **p; int i; { return p[i]; } static char *f (char * (*g) (char **, int), char **p, ...) { char *s; va_list v; va_start (v,p); s = g (p, va_arg (v,int)); va_end (v); return s; } /* OSF 4.0 Compaq cc is some sort of almost-ANSI by default. It has function prototypes and stuff, but not '\xHH' hex character constants. These don't provoke an error unfortunately, instead are silently treated as 'x'. The following induces an error, until -std1 is added to get proper ANSI mode. Curiously '\x00'!='x' always comes out true, for an array size at least. It's necessary to write '\x00'==0 to get something that's true only with -std1. */ int osf4_cc_array ['\x00' == 0 ? 1 : -1]; int test (int i, double x); struct s1 {int (*f) (int a);}; struct s2 {int (*f) (double a);}; int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int); int argc; char **argv; int main () { return f (e, argv, 0) != argv[0] || f (e, argv, 1) != argv[1]; ; return 0; } _ACEOF # Don't try gcc -ansi; that turns off useful extensions and # breaks some systems' header files. # AIX -qlanglvl=ansi # Ultrix and OSF/1 -std1 # HP-UX 10.20 and later -Ae # HP-UX older versions -Aa -D_HPUX_SOURCE # SVR4 -Xc -D__EXTENSIONS__ for ac_arg in "" -qlanglvl=ansi -std1 -Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__" do CC="$ac_save_CC $ac_arg" rm -f conftest.$ac_objext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest.$ac_objext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_prog_cc_stdc=$ac_arg break else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 fi rm -f conftest.err conftest.$ac_objext done rm -f conftest.$ac_ext conftest.$ac_objext CC=$ac_save_CC fi case "x$ac_cv_prog_cc_stdc" in x|xno) echo "$as_me:$LINENO: result: none needed" >&5 echo "${ECHO_T}none needed" >&6 ;; *) echo "$as_me:$LINENO: result: $ac_cv_prog_cc_stdc" >&5 echo "${ECHO_T}$ac_cv_prog_cc_stdc" >&6 CC="$CC $ac_cv_prog_cc_stdc" ;; esac # Some people use a C++ compiler to compile C. Since we use `exit', # in C++ we need to declare it. In case someone uses the same compiler # for both compiling C and C++ we need to have the C++ compiler decide # the declaration of exit, since it's the most demanding environment. cat >conftest.$ac_ext <<_ACEOF #ifndef __cplusplus choke me #endif _ACEOF rm -f conftest.$ac_objext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest.$ac_objext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then for ac_declaration in \ '' \ 'extern "C" void std::exit (int) throw (); using std::exit;' \ 'extern "C" void std::exit (int); using std::exit;' \ 'extern "C" void exit (int) throw ();' \ 'extern "C" void exit (int);' \ 'void exit (int);' do cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ $ac_declaration #include int main () { exit (42); ; return 0; } _ACEOF rm -f conftest.$ac_objext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest.$ac_objext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then : else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 continue fi rm -f conftest.err conftest.$ac_objext conftest.$ac_ext cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ $ac_declaration int main () { exit (42); ; return 0; } _ACEOF rm -f conftest.$ac_objext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest.$ac_objext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then break else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 fi rm -f conftest.err conftest.$ac_objext conftest.$ac_ext done rm -f conftest* if test -n "$ac_declaration"; then echo '#ifdef __cplusplus' >>confdefs.h echo $ac_declaration >>confdefs.h echo '#endif' >>confdefs.h fi else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 fi rm -f conftest.err conftest.$ac_objext conftest.$ac_ext ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu depcc="$CC" am_compiler_list= echo "$as_me:$LINENO: checking dependency style of $depcc" >&5 echo $ECHO_N "checking dependency style of $depcc... $ECHO_C" >&6 if test "${am_cv_CC_dependencies_compiler_type+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then # We make a subdir and do the tests there. Otherwise we can end up # making bogus files that we don't know about and never remove. For # instance it was reported that on HP-UX the gcc test will end up # making a dummy file named `D' -- because `-MD' means `put the output # in D'. mkdir conftest.dir # Copy depcomp to subdir because otherwise we won't find it if we're # using a relative directory. cp "$am_depcomp" conftest.dir cd conftest.dir # We will build objects and dependencies in a subdirectory because # it helps to detect inapplicable dependency modes. For instance # both Tru64's cc and ICC support -MD to output dependencies as a # side effect of compilation, but ICC will put the dependencies in # the current directory while Tru64 will put them in the object # directory. mkdir sub am_cv_CC_dependencies_compiler_type=none if test "$am_compiler_list" = ""; then am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp` fi for depmode in $am_compiler_list; do # Setup a source with many dependencies, because some compilers # like to wrap large dependency lists on column 80 (with \), and # we should not choose a depcomp mode which is confused by this. # # We need to recreate these files for each test, as the compiler may # overwrite some of them when testing with obscure command lines. # This happens at least with the AIX C compiler. : > sub/conftest.c for i in 1 2 3 4 5 6; do echo '#include "conftst'$i'.h"' >> sub/conftest.c # Using `: > sub/conftst$i.h' creates only sub/conftst1.h with # Solaris 8's {/usr,}/bin/sh. touch sub/conftst$i.h done echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf case $depmode in nosideeffect) # after this tag, mechanisms are not by side-effect, so they'll # only be used when explicitly requested if test "x$enable_dependency_tracking" = xyes; then continue else break fi ;; none) break ;; esac # We check with `-c' and `-o' for the sake of the "dashmstdout" # mode. It turns out that the SunPro C++ compiler does not properly # handle `-M -o', and we need to detect this. 
if depmode=$depmode \ source=sub/conftest.c object=sub/conftest.${OBJEXT-o} \ depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \ $SHELL ./depcomp $depcc -c -o sub/conftest.${OBJEXT-o} sub/conftest.c \ >/dev/null 2>conftest.err && grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 && grep sub/conftest.${OBJEXT-o} sub/conftest.Po > /dev/null 2>&1 && ${MAKE-make} -s -f confmf > /dev/null 2>&1; then # icc doesn't choke on unknown options, it will just issue warnings # or remarks (even with -Werror). So we grep stderr for any message # that says an option was ignored or not supported. # When given -MP, icc 7.0 and 7.1 complain thusly: # icc: Command line warning: ignoring option '-M'; no argument required # The diagnosis changed in icc 8.0: # icc: Command line remark: option '-MP' not supported if (grep 'ignoring option' conftest.err || grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else am_cv_CC_dependencies_compiler_type=$depmode break fi fi done cd .. rm -rf conftest.dir else am_cv_CC_dependencies_compiler_type=none fi fi echo "$as_me:$LINENO: result: $am_cv_CC_dependencies_compiler_type" >&5 echo "${ECHO_T}$am_cv_CC_dependencies_compiler_type" >&6 CCDEPMODE=depmode=$am_cv_CC_dependencies_compiler_type if test "x$enable_dependency_tracking" != xno \ && test "$am_cv_CC_dependencies_compiler_type" = gcc3; then am__fastdepCC_TRUE= am__fastdepCC_FALSE='#' else am__fastdepCC_TRUE='#' am__fastdepCC_FALSE= fi for ac_prog in flex lex do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 echo "$as_me:$LINENO: checking for $ac_word" >&5 echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6 if test "${ac_cv_prog_LEX+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test -n "$LEX"; then ac_cv_prog_LEX="$LEX" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_LEX="$ac_prog" echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done fi fi LEX=$ac_cv_prog_LEX if test -n "$LEX"; then echo "$as_me:$LINENO: result: $LEX" >&5 echo "${ECHO_T}$LEX" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi test -n "$LEX" && break done test -n "$LEX" || LEX=":" if test -z "$LEXLIB" then echo "$as_me:$LINENO: checking for yywrap in -lfl" >&5 echo $ECHO_N "checking for yywrap in -lfl... $ECHO_C" >&6 if test "${ac_cv_lib_fl_yywrap+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lfl $LIBS" cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ /* Override any gcc2 internal prototype to avoid an error. */ #ifdef __cplusplus extern "C" #endif /* We use char because int might match the return type of a gcc2 builtin and then its argument prototype would still apply. */ char yywrap (); int main () { yywrap (); ; return 0; } _ACEOF rm -f conftest.$ac_objext conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! 
-s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_lib_fl_yywrap=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_cv_lib_fl_yywrap=no fi rm -f conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi echo "$as_me:$LINENO: result: $ac_cv_lib_fl_yywrap" >&5 echo "${ECHO_T}$ac_cv_lib_fl_yywrap" >&6 if test $ac_cv_lib_fl_yywrap = yes; then LEXLIB="-lfl" else echo "$as_me:$LINENO: checking for yywrap in -ll" >&5 echo $ECHO_N "checking for yywrap in -ll... $ECHO_C" >&6 if test "${ac_cv_lib_l_yywrap+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ll $LIBS" cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ /* Override any gcc2 internal prototype to avoid an error. */ #ifdef __cplusplus extern "C" #endif /* We use char because int might match the return type of a gcc2 builtin and then its argument prototype would still apply. */ char yywrap (); int main () { yywrap (); ; return 0; } _ACEOF rm -f conftest.$ac_objext conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_lib_l_yywrap=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_cv_lib_l_yywrap=no fi rm -f conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi echo "$as_me:$LINENO: result: $ac_cv_lib_l_yywrap" >&5 echo "${ECHO_T}$ac_cv_lib_l_yywrap" >&6 if test $ac_cv_lib_l_yywrap = yes; then LEXLIB="-ll" fi fi fi if test "x$LEX" != "x:"; then echo "$as_me:$LINENO: checking lex output file root" >&5 echo $ECHO_N "checking lex output file root... $ECHO_C" >&6 if test "${ac_cv_prog_lex_root+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else # The minimal lex program is just a single line: %%. But some broken lexes # (Solaris, I think it was) want two %% lines, so accommodate them. cat >conftest.l <<_ACEOF %% %% _ACEOF { (eval echo "$as_me:$LINENO: \"$LEX conftest.l\"") >&5 (eval $LEX conftest.l) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 (exit $ac_status); } if test -f lex.yy.c; then ac_cv_prog_lex_root=lex.yy elif test -f lexyy.c; then ac_cv_prog_lex_root=lexyy else { { echo "$as_me:$LINENO: error: cannot find output from $LEX; giving up" >&5 echo "$as_me: error: cannot find output from $LEX; giving up" >&2;} { (exit 1); exit 1; }; } fi fi echo "$as_me:$LINENO: result: $ac_cv_prog_lex_root" >&5 echo "${ECHO_T}$ac_cv_prog_lex_root" >&6 rm -f conftest.l LEX_OUTPUT_ROOT=$ac_cv_prog_lex_root echo "$as_me:$LINENO: checking whether yytext is a pointer" >&5 echo $ECHO_N "checking whether yytext is a pointer... $ECHO_C" >&6 if test "${ac_cv_prog_lex_yytext_pointer+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else # POSIX says lex can declare yytext either as a pointer or an array; the # default is implementation-dependent. Figure out which it is, since # not all implementations provide the %pointer and %array declarations. ac_cv_prog_lex_yytext_pointer=no echo 'extern char *yytext;' >>$LEX_OUTPUT_ROOT.c ac_save_LIBS=$LIBS LIBS="$LIBS $LEXLIB" cat >conftest.$ac_ext <<_ACEOF `cat $LEX_OUTPUT_ROOT.c` _ACEOF rm -f conftest.$ac_objext conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_prog_lex_yytext_pointer=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 fi rm -f conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_save_LIBS rm -f "${LEX_OUTPUT_ROOT}.c" fi echo "$as_me:$LINENO: result: $ac_cv_prog_lex_yytext_pointer" >&5 echo "${ECHO_T}$ac_cv_prog_lex_yytext_pointer" >&6 if test $ac_cv_prog_lex_yytext_pointer = yes; then cat >>confdefs.h <<\_ACEOF #define YYTEXT_POINTER 1 _ACEOF fi fi if test "$LEX" = :; then LEX=${am_missing_run}flex fi # Checks for libraries. # Checks for header files. ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu echo "$as_me:$LINENO: checking how to run the C preprocessor" >&5 echo $ECHO_N "checking how to run the C preprocessor... $ECHO_C" >&6 # On Suns, sometimes $CPP names a directory. if test -n "$CPP" && test -d "$CPP"; then CPP= fi if test -z "$CPP"; then if test "${ac_cv_prog_CPP+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else # Double quotes because CPP needs to be expanded for CPP in "$CC -E" "$CC -E -traditional-cpp" "/lib/cpp" do ac_preproc_ok=false for ac_c_preproc_warn_flag in '' yes do # Use a header file that comes with gcc, so configuring glibc # with a fresh cross-compiler works. # Prefer to if __STDC__ is defined, since # exists even on freestanding compilers. # On the NeXT, cc -E runs the code through the compiler's parser, # not just through cpp. "Syntax error" is here to catch this case. cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. 
*/ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ #ifdef __STDC__ # include #else # include #endif Syntax error _ACEOF if { (eval echo "$as_me:$LINENO: \"$ac_cpp conftest.$ac_ext\"") >&5 (eval $ac_cpp conftest.$ac_ext) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } >/dev/null; then if test -s conftest.err; then ac_cpp_err=$ac_c_preproc_warn_flag ac_cpp_err=$ac_cpp_err$ac_c_werror_flag else ac_cpp_err= fi else ac_cpp_err=yes fi if test -z "$ac_cpp_err"; then : else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 # Broken: fails on valid input. continue fi rm -f conftest.err conftest.$ac_ext # OK, works on sane cases. Now check whether non-existent headers # can be detected and how. cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ #include _ACEOF if { (eval echo "$as_me:$LINENO: \"$ac_cpp conftest.$ac_ext\"") >&5 (eval $ac_cpp conftest.$ac_ext) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } >/dev/null; then if test -s conftest.err; then ac_cpp_err=$ac_c_preproc_warn_flag ac_cpp_err=$ac_cpp_err$ac_c_werror_flag else ac_cpp_err= fi else ac_cpp_err=yes fi if test -z "$ac_cpp_err"; then # Broken: success on invalid input. continue else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 # Passes both tests. ac_preproc_ok=: break fi rm -f conftest.err conftest.$ac_ext done # Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. rm -f conftest.err conftest.$ac_ext if $ac_preproc_ok; then break fi done ac_cv_prog_CPP=$CPP fi CPP=$ac_cv_prog_CPP else ac_cv_prog_CPP=$CPP fi echo "$as_me:$LINENO: result: $CPP" >&5 echo "${ECHO_T}$CPP" >&6 ac_preproc_ok=false for ac_c_preproc_warn_flag in '' yes do # Use a header file that comes with gcc, so configuring glibc # with a fresh cross-compiler works. # Prefer to if __STDC__ is defined, since # exists even on freestanding compilers. # On the NeXT, cc -E runs the code through the compiler's parser, # not just through cpp. "Syntax error" is here to catch this case. cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ #ifdef __STDC__ # include #else # include #endif Syntax error _ACEOF if { (eval echo "$as_me:$LINENO: \"$ac_cpp conftest.$ac_ext\"") >&5 (eval $ac_cpp conftest.$ac_ext) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } >/dev/null; then if test -s conftest.err; then ac_cpp_err=$ac_c_preproc_warn_flag ac_cpp_err=$ac_cpp_err$ac_c_werror_flag else ac_cpp_err= fi else ac_cpp_err=yes fi if test -z "$ac_cpp_err"; then : else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 # Broken: fails on valid input. continue fi rm -f conftest.err conftest.$ac_ext # OK, works on sane cases. Now check whether non-existent headers # can be detected and how. cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. 
*/ #include <ac_nonexistent.h> _ACEOF if { (eval echo "$as_me:$LINENO: \"$ac_cpp conftest.$ac_ext\"") >&5 (eval $ac_cpp conftest.$ac_ext) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } >/dev/null; then if test -s conftest.err; then ac_cpp_err=$ac_c_preproc_warn_flag ac_cpp_err=$ac_cpp_err$ac_c_werror_flag else ac_cpp_err= fi else ac_cpp_err=yes fi if test -z "$ac_cpp_err"; then # Broken: success on invalid input. continue else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 # Passes both tests. ac_preproc_ok=: break fi rm -f conftest.err conftest.$ac_ext done # Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. rm -f conftest.err conftest.$ac_ext if $ac_preproc_ok; then : else { { echo "$as_me:$LINENO: error: C preprocessor \"$CPP\" fails sanity check See \`config.log' for more details." >&5 echo "$as_me: error: C preprocessor \"$CPP\" fails sanity check See \`config.log' for more details." >&2;} { (exit 1); exit 1; }; } fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu echo "$as_me:$LINENO: checking for egrep" >&5 echo $ECHO_N "checking for egrep... $ECHO_C" >&6 if test "${ac_cv_prog_egrep+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if echo a | (grep -E '(a|b)') >/dev/null 2>&1 then ac_cv_prog_egrep='grep -E' else ac_cv_prog_egrep='egrep' fi fi echo "$as_me:$LINENO: result: $ac_cv_prog_egrep" >&5 echo "${ECHO_T}$ac_cv_prog_egrep" >&6 EGREP=$ac_cv_prog_egrep echo "$as_me:$LINENO: checking for ANSI C header files" >&5 echo $ECHO_N "checking for ANSI C header files... $ECHO_C" >&6 if test "${ac_cv_header_stdc+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ #include <stdlib.h> #include <stdarg.h> #include <string.h> #include <float.h> int main () { ; return 0; } _ACEOF rm -f conftest.$ac_objext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest.$ac_objext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_header_stdc=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_cv_header_stdc=no fi rm -f conftest.err conftest.$ac_objext conftest.$ac_ext if test $ac_cv_header_stdc = yes; then # SunOS 4.x string.h does not declare mem*, contrary to ANSI. cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ #include <string.h> _ACEOF if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | $EGREP "memchr" >/dev/null 2>&1; then : else ac_cv_header_stdc=no fi rm -f conftest* fi if test $ac_cv_header_stdc = yes; then # ISC 2.0.2 stdlib.h does not declare free, contrary to ANSI.
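# Outline of the STDC_HEADERS probe above and below (a sketch, assuming the
# stock Autoconf 2.59 AC_HEADER_STDC expansion): first compile a program that
# includes the four ANSI headers; then feed string.h and stdlib.h through the
# preprocessor and grep the output for "memchr" and "free"; finally, unless
# cross-compiling, run a small program exercising the ctype.h macros. Any
# failing step resets ac_cv_header_stdc=no, so STDC_HEADERS is defined only
# when every probe passes. The grep step is roughly equivalent to this
# illustrative command (not used by the script itself):
#   cpp conftest.c 2>/dev/null | grep free >/dev/null && echo "stdlib.h declares free"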
cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ #include <stdlib.h> _ACEOF if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | $EGREP "free" >/dev/null 2>&1; then : else ac_cv_header_stdc=no fi rm -f conftest* fi if test $ac_cv_header_stdc = yes; then # /bin/cc in Irix-4.0.5 gets non-ANSI ctype macros unless using -ansi. if test "$cross_compiling" = yes; then : else cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ #include <ctype.h> #if ((' ' & 0x0FF) == 0x020) # define ISLOWER(c) ('a' <= (c) && (c) <= 'z') # define TOUPPER(c) (ISLOWER(c) ? 'A' + ((c) - 'a') : (c)) #else # define ISLOWER(c) \ (('a' <= (c) && (c) <= 'i') \ || ('j' <= (c) && (c) <= 'r') \ || ('s' <= (c) && (c) <= 'z')) # define TOUPPER(c) (ISLOWER(c) ? ((c) | 0x40) : (c)) #endif #define XOR(e, f) (((e) && !(f)) || (!(e) && (f))) int main () { int i; for (i = 0; i < 256; i++) if (XOR (islower (i), ISLOWER (i)) || toupper (i) != TOUPPER (i)) exit(2); exit (0); } _ACEOF rm -f conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='./conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then : else echo "$as_me: program exited with status $ac_status" >&5 echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ( exit $ac_status ) ac_cv_header_stdc=no fi rm -f core *.core gmon.out bb.out conftest$ac_exeext conftest.$ac_objext conftest.$ac_ext fi fi fi echo "$as_me:$LINENO: result: $ac_cv_header_stdc" >&5 echo "${ECHO_T}$ac_cv_header_stdc" >&6 if test $ac_cv_header_stdc = yes; then cat >>confdefs.h <<\_ACEOF #define STDC_HEADERS 1 _ACEOF fi # On IRIX 5.3, sys/types and inttypes.h are conflicting. for ac_header in sys/types.h sys/stat.h stdlib.h string.h memory.h strings.h \ inttypes.h stdint.h unistd.h do as_ac_Header=`echo "ac_cv_header_$ac_header" | $as_tr_sh` echo "$as_me:$LINENO: checking for $ac_header" >&5 echo $ECHO_N "checking for $ac_header... $ECHO_C" >&6 if eval "test \"\${$as_ac_Header+set}\" = set"; then echo $ECHO_N "(cached) $ECHO_C" >&6 else cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ $ac_includes_default #include <$ac_header> _ACEOF rm -f conftest.$ac_objext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest.$ac_objext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$?
= $ac_status" >&5 (exit $ac_status); }; }; then eval "$as_ac_Header=yes" else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 eval "$as_ac_Header=no" fi rm -f conftest.err conftest.$ac_objext conftest.$ac_ext fi echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_Header'}'`" >&5 echo "${ECHO_T}`eval echo '${'$as_ac_Header'}'`" >&6 if test `eval echo '${'$as_ac_Header'}'` = yes; then cat >>confdefs.h <<_ACEOF #define `echo "HAVE_$ac_header" | $as_tr_cpp` 1 _ACEOF fi done for ac_header in alloca.h stdlib.h string.h do as_ac_Header=`echo "ac_cv_header_$ac_header" | $as_tr_sh` if eval "test \"\${$as_ac_Header+set}\" = set"; then echo "$as_me:$LINENO: checking for $ac_header" >&5 echo $ECHO_N "checking for $ac_header... $ECHO_C" >&6 if eval "test \"\${$as_ac_Header+set}\" = set"; then echo $ECHO_N "(cached) $ECHO_C" >&6 fi echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_Header'}'`" >&5 echo "${ECHO_T}`eval echo '${'$as_ac_Header'}'`" >&6 else # Is the header compilable? echo "$as_me:$LINENO: checking $ac_header usability" >&5 echo $ECHO_N "checking $ac_header usability... $ECHO_C" >&6 cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ $ac_includes_default #include <$ac_header> _ACEOF rm -f conftest.$ac_objext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest.$ac_objext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_header_compiler=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_header_compiler=no fi rm -f conftest.err conftest.$ac_objext conftest.$ac_ext echo "$as_me:$LINENO: result: $ac_header_compiler" >&5 echo "${ECHO_T}$ac_header_compiler" >&6 # Is the header present? echo "$as_me:$LINENO: checking $ac_header presence" >&5 echo $ECHO_N "checking $ac_header presence... $ECHO_C" >&6 cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ #include <$ac_header> _ACEOF if { (eval echo "$as_me:$LINENO: \"$ac_cpp conftest.$ac_ext\"") >&5 (eval $ac_cpp conftest.$ac_ext) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } >/dev/null; then if test -s conftest.err; then ac_cpp_err=$ac_c_preproc_warn_flag ac_cpp_err=$ac_cpp_err$ac_c_werror_flag else ac_cpp_err= fi else ac_cpp_err=yes fi if test -z "$ac_cpp_err"; then ac_header_preproc=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_header_preproc=no fi rm -f conftest.err conftest.$ac_ext echo "$as_me:$LINENO: result: $ac_header_preproc" >&5 echo "${ECHO_T}$ac_header_preproc" >&6 # So? What about this header? case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in yes:no: ) { echo "$as_me:$LINENO: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" 
>&5 echo "$as_me: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&2;} { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the compiler's result" >&5 echo "$as_me: WARNING: $ac_header: proceeding with the compiler's result" >&2;} ac_header_preproc=yes ;; no:yes:* ) { echo "$as_me:$LINENO: WARNING: $ac_header: present but cannot be compiled" >&5 echo "$as_me: WARNING: $ac_header: present but cannot be compiled" >&2;} { echo "$as_me:$LINENO: WARNING: $ac_header: check for missing prerequisite headers?" >&5 echo "$as_me: WARNING: $ac_header: check for missing prerequisite headers?" >&2;} { echo "$as_me:$LINENO: WARNING: $ac_header: see the Autoconf documentation" >&5 echo "$as_me: WARNING: $ac_header: see the Autoconf documentation" >&2;} { echo "$as_me:$LINENO: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&5 echo "$as_me: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&2;} { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5 echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;} { echo "$as_me:$LINENO: WARNING: $ac_header: in the future, the compiler will take precedence" >&5 echo "$as_me: WARNING: $ac_header: in the future, the compiler will take precedence" >&2;} ( cat <<\_ASBOX ## ------------------------------- ## ## Report this to the syck lists. ## ## ------------------------------- ## _ASBOX ) | sed "s/^/$as_me: WARNING: /" >&2 ;; esac echo "$as_me:$LINENO: checking for $ac_header" >&5 echo $ECHO_N "checking for $ac_header... $ECHO_C" >&6 if eval "test \"\${$as_ac_Header+set}\" = set"; then echo $ECHO_N "(cached) $ECHO_C" >&6 else eval "$as_ac_Header=\$ac_header_preproc" fi echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_Header'}'`" >&5 echo "${ECHO_T}`eval echo '${'$as_ac_Header'}'`" >&6 fi if test `eval echo '${'$as_ac_Header'}'` = yes; then cat >>confdefs.h <<_ACEOF #define `echo "HAVE_$ac_header" | $as_tr_cpp` 1 _ACEOF fi done echo "$as_me:$LINENO: checking for int" >&5 echo $ECHO_N "checking for int... $ECHO_C" >&6 if test "${ac_cv_type_int+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ $ac_includes_default int main () { if ((int *) 0) return 0; if (sizeof (int)) return 0; ; return 0; } _ACEOF rm -f conftest.$ac_objext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest.$ac_objext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_type_int=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_cv_type_int=no fi rm -f conftest.err conftest.$ac_objext conftest.$ac_ext fi echo "$as_me:$LINENO: result: $ac_cv_type_int" >&5 echo "${ECHO_T}$ac_cv_type_int" >&6 echo "$as_me:$LINENO: checking size of int" >&5 echo $ECHO_N "checking size of int... 
$ECHO_C" >&6 if test "${ac_cv_sizeof_int+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test "$ac_cv_type_int" = yes; then # The cast to unsigned long works around a bug in the HP C Compiler # version HP92453-01 B.11.11.23709.GP, which incorrectly rejects # declarations like `int a3[[(sizeof (unsigned char)) >= 0]];'. # This bug is HP SR number 8606223364. if test "$cross_compiling" = yes; then # Depending upon the size, compute the lo and hi bounds. cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ $ac_includes_default int main () { static int test_array [1 - 2 * !(((long) (sizeof (int))) >= 0)]; test_array [0] = 0 ; return 0; } _ACEOF rm -f conftest.$ac_objext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest.$ac_objext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_lo=0 ac_mid=0 while :; do cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ $ac_includes_default int main () { static int test_array [1 - 2 * !(((long) (sizeof (int))) <= $ac_mid)]; test_array [0] = 0 ; return 0; } _ACEOF rm -f conftest.$ac_objext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest.$ac_objext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_hi=$ac_mid; break else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_lo=`expr $ac_mid + 1` if test $ac_lo -le $ac_mid; then ac_lo= ac_hi= break fi ac_mid=`expr 2 '*' $ac_mid + 1` fi rm -f conftest.err conftest.$ac_objext conftest.$ac_ext done else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ $ac_includes_default int main () { static int test_array [1 - 2 * !(((long) (sizeof (int))) < 0)]; test_array [0] = 0 ; return 0; } _ACEOF rm -f conftest.$ac_objext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest.$ac_objext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_hi=-1 ac_mid=-1 while :; do cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ $ac_includes_default int main () { static int test_array [1 - 2 * !(((long) (sizeof (int))) >= $ac_mid)]; test_array [0] = 0 ; return 0; } _ACEOF rm -f conftest.$ac_objext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest.$ac_objext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_lo=$ac_mid; break else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_hi=`expr '(' $ac_mid ')' - 1` if test $ac_mid -le $ac_hi; then ac_lo= ac_hi= break fi ac_mid=`expr 2 '*' $ac_mid` fi rm -f conftest.err conftest.$ac_objext conftest.$ac_ext done else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_lo= ac_hi= fi rm -f conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f conftest.err conftest.$ac_objext conftest.$ac_ext # Binary search between lo and hi bounds. while test "x$ac_lo" != "x$ac_hi"; do ac_mid=`expr '(' $ac_hi - $ac_lo ')' / 2 + $ac_lo` cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ $ac_includes_default int main () { static int test_array [1 - 2 * !(((long) (sizeof (int))) <= $ac_mid)]; test_array [0] = 0 ; return 0; } _ACEOF rm -f conftest.$ac_objext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest.$ac_objext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_hi=$ac_mid else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_lo=`expr '(' $ac_mid ')' + 1` fi rm -f conftest.err conftest.$ac_objext conftest.$ac_ext done case $ac_lo in ?*) ac_cv_sizeof_int=$ac_lo;; '') { { echo "$as_me:$LINENO: error: cannot compute sizeof (int), 77 See \`config.log' for more details." >&5 echo "$as_me: error: cannot compute sizeof (int), 77 See \`config.log' for more details." >&2;} { (exit 1); exit 1; }; } ;; esac else if test "$cross_compiling" = yes; then { { echo "$as_me:$LINENO: error: cannot run test program while cross compiling See \`config.log' for more details." 
>&5 echo "$as_me: error: cannot run test program while cross compiling See \`config.log' for more details." >&2;} { (exit 1); exit 1; }; } else cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ $ac_includes_default long longval () { return (long) (sizeof (int)); } unsigned long ulongval () { return (long) (sizeof (int)); } #include #include int main () { FILE *f = fopen ("conftest.val", "w"); if (! f) exit (1); if (((long) (sizeof (int))) < 0) { long i = longval (); if (i != ((long) (sizeof (int)))) exit (1); fprintf (f, "%ld\n", i); } else { unsigned long i = ulongval (); if (i != ((long) (sizeof (int)))) exit (1); fprintf (f, "%lu\n", i); } exit (ferror (f) || fclose (f) != 0); ; return 0; } _ACEOF rm -f conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='./conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_sizeof_int=`cat conftest.val` else echo "$as_me: program exited with status $ac_status" >&5 echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ( exit $ac_status ) { { echo "$as_me:$LINENO: error: cannot compute sizeof (int), 77 See \`config.log' for more details." >&5 echo "$as_me: error: cannot compute sizeof (int), 77 See \`config.log' for more details." >&2;} { (exit 1); exit 1; }; } fi rm -f core *.core gmon.out bb.out conftest$ac_exeext conftest.$ac_objext conftest.$ac_ext fi fi rm -f conftest.val else ac_cv_sizeof_int=0 fi fi echo "$as_me:$LINENO: result: $ac_cv_sizeof_int" >&5 echo "${ECHO_T}$ac_cv_sizeof_int" >&6 cat >>confdefs.h <<_ACEOF #define SIZEOF_INT $ac_cv_sizeof_int _ACEOF echo "$as_me:$LINENO: checking for long" >&5 echo $ECHO_N "checking for long... $ECHO_C" >&6 if test "${ac_cv_type_long+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ $ac_includes_default int main () { if ((long *) 0) return 0; if (sizeof (long)) return 0; ; return 0; } _ACEOF rm -f conftest.$ac_objext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest.$ac_objext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_type_long=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_cv_type_long=no fi rm -f conftest.err conftest.$ac_objext conftest.$ac_ext fi echo "$as_me:$LINENO: result: $ac_cv_type_long" >&5 echo "${ECHO_T}$ac_cv_type_long" >&6 echo "$as_me:$LINENO: checking size of long" >&5 echo $ECHO_N "checking size of long... 
$ECHO_C" >&6 if test "${ac_cv_sizeof_long+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test "$ac_cv_type_long" = yes; then # The cast to unsigned long works around a bug in the HP C Compiler # version HP92453-01 B.11.11.23709.GP, which incorrectly rejects # declarations like `int a3[[(sizeof (unsigned char)) >= 0]];'. # This bug is HP SR number 8606223364. if test "$cross_compiling" = yes; then # Depending upon the size, compute the lo and hi bounds. cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ $ac_includes_default int main () { static int test_array [1 - 2 * !(((long) (sizeof (long))) >= 0)]; test_array [0] = 0 ; return 0; } _ACEOF rm -f conftest.$ac_objext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest.$ac_objext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_lo=0 ac_mid=0 while :; do cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ $ac_includes_default int main () { static int test_array [1 - 2 * !(((long) (sizeof (long))) <= $ac_mid)]; test_array [0] = 0 ; return 0; } _ACEOF rm -f conftest.$ac_objext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest.$ac_objext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_hi=$ac_mid; break else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_lo=`expr $ac_mid + 1` if test $ac_lo -le $ac_mid; then ac_lo= ac_hi= break fi ac_mid=`expr 2 '*' $ac_mid + 1` fi rm -f conftest.err conftest.$ac_objext conftest.$ac_ext done else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ $ac_includes_default int main () { static int test_array [1 - 2 * !(((long) (sizeof (long))) < 0)]; test_array [0] = 0 ; return 0; } _ACEOF rm -f conftest.$ac_objext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest.$ac_objext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_hi=-1 ac_mid=-1 while :; do cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ $ac_includes_default int main () { static int test_array [1 - 2 * !(((long) (sizeof (long))) >= $ac_mid)]; test_array [0] = 0 ; return 0; } _ACEOF rm -f conftest.$ac_objext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest.$ac_objext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_lo=$ac_mid; break else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_hi=`expr '(' $ac_mid ')' - 1` if test $ac_mid -le $ac_hi; then ac_lo= ac_hi= break fi ac_mid=`expr 2 '*' $ac_mid` fi rm -f conftest.err conftest.$ac_objext conftest.$ac_ext done else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_lo= ac_hi= fi rm -f conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f conftest.err conftest.$ac_objext conftest.$ac_ext # Binary search between lo and hi bounds. while test "x$ac_lo" != "x$ac_hi"; do ac_mid=`expr '(' $ac_hi - $ac_lo ')' / 2 + $ac_lo` cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ $ac_includes_default int main () { static int test_array [1 - 2 * !(((long) (sizeof (long))) <= $ac_mid)]; test_array [0] = 0 ; return 0; } _ACEOF rm -f conftest.$ac_objext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest.$ac_objext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_hi=$ac_mid else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_lo=`expr '(' $ac_mid ')' + 1` fi rm -f conftest.err conftest.$ac_objext conftest.$ac_ext done case $ac_lo in ?*) ac_cv_sizeof_long=$ac_lo;; '') { { echo "$as_me:$LINENO: error: cannot compute sizeof (long), 77 See \`config.log' for more details." >&5 echo "$as_me: error: cannot compute sizeof (long), 77 See \`config.log' for more details." >&2;} { (exit 1); exit 1; }; } ;; esac else if test "$cross_compiling" = yes; then { { echo "$as_me:$LINENO: error: cannot run test program while cross compiling See \`config.log' for more details." 
>&5 echo "$as_me: error: cannot run test program while cross compiling See \`config.log' for more details." >&2;} { (exit 1); exit 1; }; } else cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ $ac_includes_default long longval () { return (long) (sizeof (long)); } unsigned long ulongval () { return (long) (sizeof (long)); } #include #include int main () { FILE *f = fopen ("conftest.val", "w"); if (! f) exit (1); if (((long) (sizeof (long))) < 0) { long i = longval (); if (i != ((long) (sizeof (long)))) exit (1); fprintf (f, "%ld\n", i); } else { unsigned long i = ulongval (); if (i != ((long) (sizeof (long)))) exit (1); fprintf (f, "%lu\n", i); } exit (ferror (f) || fclose (f) != 0); ; return 0; } _ACEOF rm -f conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='./conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_sizeof_long=`cat conftest.val` else echo "$as_me: program exited with status $ac_status" >&5 echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ( exit $ac_status ) { { echo "$as_me:$LINENO: error: cannot compute sizeof (long), 77 See \`config.log' for more details." >&5 echo "$as_me: error: cannot compute sizeof (long), 77 See \`config.log' for more details." >&2;} { (exit 1); exit 1; }; } fi rm -f core *.core gmon.out bb.out conftest$ac_exeext conftest.$ac_objext conftest.$ac_ext fi fi rm -f conftest.val else ac_cv_sizeof_long=0 fi fi echo "$as_me:$LINENO: result: $ac_cv_sizeof_long" >&5 echo "${ECHO_T}$ac_cv_sizeof_long" >&6 cat >>confdefs.h <<_ACEOF #define SIZEOF_LONG $ac_cv_sizeof_long _ACEOF # Checks for typedefs, structures, and compiler characteristics. # Checks for library functions. for ac_header in stdlib.h do as_ac_Header=`echo "ac_cv_header_$ac_header" | $as_tr_sh` if eval "test \"\${$as_ac_Header+set}\" = set"; then echo "$as_me:$LINENO: checking for $ac_header" >&5 echo $ECHO_N "checking for $ac_header... $ECHO_C" >&6 if eval "test \"\${$as_ac_Header+set}\" = set"; then echo $ECHO_N "(cached) $ECHO_C" >&6 fi echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_Header'}'`" >&5 echo "${ECHO_T}`eval echo '${'$as_ac_Header'}'`" >&6 else # Is the header compilable? echo "$as_me:$LINENO: checking $ac_header usability" >&5 echo $ECHO_N "checking $ac_header usability... $ECHO_C" >&6 cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ $ac_includes_default #include <$ac_header> _ACEOF rm -f conftest.$ac_objext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest.$ac_objext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 (exit $ac_status); }; }; then ac_header_compiler=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_header_compiler=no fi rm -f conftest.err conftest.$ac_objext conftest.$ac_ext echo "$as_me:$LINENO: result: $ac_header_compiler" >&5 echo "${ECHO_T}$ac_header_compiler" >&6 # Is the header present? echo "$as_me:$LINENO: checking $ac_header presence" >&5 echo $ECHO_N "checking $ac_header presence... $ECHO_C" >&6 cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ #include <$ac_header> _ACEOF if { (eval echo "$as_me:$LINENO: \"$ac_cpp conftest.$ac_ext\"") >&5 (eval $ac_cpp conftest.$ac_ext) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } >/dev/null; then if test -s conftest.err; then ac_cpp_err=$ac_c_preproc_warn_flag ac_cpp_err=$ac_cpp_err$ac_c_werror_flag else ac_cpp_err= fi else ac_cpp_err=yes fi if test -z "$ac_cpp_err"; then ac_header_preproc=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_header_preproc=no fi rm -f conftest.err conftest.$ac_ext echo "$as_me:$LINENO: result: $ac_header_preproc" >&5 echo "${ECHO_T}$ac_header_preproc" >&6 # So? What about this header? case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in yes:no: ) { echo "$as_me:$LINENO: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&5 echo "$as_me: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&2;} { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the compiler's result" >&5 echo "$as_me: WARNING: $ac_header: proceeding with the compiler's result" >&2;} ac_header_preproc=yes ;; no:yes:* ) { echo "$as_me:$LINENO: WARNING: $ac_header: present but cannot be compiled" >&5 echo "$as_me: WARNING: $ac_header: present but cannot be compiled" >&2;} { echo "$as_me:$LINENO: WARNING: $ac_header: check for missing prerequisite headers?" >&5 echo "$as_me: WARNING: $ac_header: check for missing prerequisite headers?" >&2;} { echo "$as_me:$LINENO: WARNING: $ac_header: see the Autoconf documentation" >&5 echo "$as_me: WARNING: $ac_header: see the Autoconf documentation" >&2;} { echo "$as_me:$LINENO: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&5 echo "$as_me: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&2;} { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5 echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;} { echo "$as_me:$LINENO: WARNING: $ac_header: in the future, the compiler will take precedence" >&5 echo "$as_me: WARNING: $ac_header: in the future, the compiler will take precedence" >&2;} ( cat <<\_ASBOX ## ------------------------------- ## ## Report this to the syck lists. ## ## ------------------------------- ## _ASBOX ) | sed "s/^/$as_me: WARNING: /" >&2 ;; esac echo "$as_me:$LINENO: checking for $ac_header" >&5 echo $ECHO_N "checking for $ac_header... 
$ECHO_C" >&6 if eval "test \"\${$as_ac_Header+set}\" = set"; then echo $ECHO_N "(cached) $ECHO_C" >&6 else eval "$as_ac_Header=\$ac_header_preproc" fi echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_Header'}'`" >&5 echo "${ECHO_T}`eval echo '${'$as_ac_Header'}'`" >&6 fi if test `eval echo '${'$as_ac_Header'}'` = yes; then cat >>confdefs.h <<_ACEOF #define `echo "HAVE_$ac_header" | $as_tr_cpp` 1 _ACEOF fi done echo "$as_me:$LINENO: checking for GNU libc compatible malloc" >&5 echo $ECHO_N "checking for GNU libc compatible malloc... $ECHO_C" >&6 if test "${ac_cv_func_malloc_0_nonnull+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test "$cross_compiling" = yes; then ac_cv_func_malloc_0_nonnull=no else cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ #if STDC_HEADERS || HAVE_STDLIB_H # include #else char *malloc (); #endif int main () { exit (malloc (0) ? 0 : 1); ; return 0; } _ACEOF rm -f conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='./conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_func_malloc_0_nonnull=yes else echo "$as_me: program exited with status $ac_status" >&5 echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ( exit $ac_status ) ac_cv_func_malloc_0_nonnull=no fi rm -f core *.core gmon.out bb.out conftest$ac_exeext conftest.$ac_objext conftest.$ac_ext fi fi echo "$as_me:$LINENO: result: $ac_cv_func_malloc_0_nonnull" >&5 echo "${ECHO_T}$ac_cv_func_malloc_0_nonnull" >&6 if test $ac_cv_func_malloc_0_nonnull = yes; then cat >>confdefs.h <<\_ACEOF #define HAVE_MALLOC 1 _ACEOF else cat >>confdefs.h <<\_ACEOF #define HAVE_MALLOC 0 _ACEOF case $LIBOBJS in "malloc.$ac_objext" | \ *" malloc.$ac_objext" | \ "malloc.$ac_objext "* | \ *" malloc.$ac_objext "* ) ;; *) LIBOBJS="$LIBOBJS malloc.$ac_objext" ;; esac cat >>confdefs.h <<\_ACEOF #define malloc rpl_malloc _ACEOF fi for ac_func in vprintf do as_ac_var=`echo "ac_cv_func_$ac_func" | $as_tr_sh` echo "$as_me:$LINENO: checking for $ac_func" >&5 echo $ECHO_N "checking for $ac_func... $ECHO_C" >&6 if eval "test \"\${$as_ac_var+set}\" = set"; then echo $ECHO_N "(cached) $ECHO_C" >&6 else cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ /* Define $ac_func to an innocuous variant, in case declares $ac_func. For example, HP-UX 11i declares gettimeofday. */ #define $ac_func innocuous_$ac_func /* System header to define __stub macros and hopefully few prototypes, which can conflict with char $ac_func (); below. Prefer to if __STDC__ is defined, since exists even on freestanding compilers. */ #ifdef __STDC__ # include #else # include #endif #undef $ac_func /* Override any gcc2 internal prototype to avoid an error. */ #ifdef __cplusplus extern "C" { #endif /* We use char because int might match the return type of a gcc2 builtin and then its argument prototype would still apply. */ char $ac_func (); /* The GNU C library defines this for functions which it implements to always fail with ENOSYS. Some functions are actually named something starting with __ and the normal name is an alias. 
*/ #if defined (__stub_$ac_func) || defined (__stub___$ac_func) choke me #else char (*f) () = $ac_func; #endif #ifdef __cplusplus } #endif int main () { return f != $ac_func; ; return 0; } _ACEOF rm -f conftest.$ac_objext conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then eval "$as_ac_var=yes" else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 eval "$as_ac_var=no" fi rm -f conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_var'}'`" >&5 echo "${ECHO_T}`eval echo '${'$as_ac_var'}'`" >&6 if test `eval echo '${'$as_ac_var'}'` = yes; then cat >>confdefs.h <<_ACEOF #define `echo "HAVE_$ac_func" | $as_tr_cpp` 1 _ACEOF echo "$as_me:$LINENO: checking for _doprnt" >&5 echo $ECHO_N "checking for _doprnt... $ECHO_C" >&6 if test "${ac_cv_func__doprnt+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ /* Define _doprnt to an innocuous variant, in case declares _doprnt. For example, HP-UX 11i declares gettimeofday. */ #define _doprnt innocuous__doprnt /* System header to define __stub macros and hopefully few prototypes, which can conflict with char _doprnt (); below. Prefer to if __STDC__ is defined, since exists even on freestanding compilers. */ #ifdef __STDC__ # include #else # include #endif #undef _doprnt /* Override any gcc2 internal prototype to avoid an error. */ #ifdef __cplusplus extern "C" { #endif /* We use char because int might match the return type of a gcc2 builtin and then its argument prototype would still apply. */ char _doprnt (); /* The GNU C library defines this for functions which it implements to always fail with ENOSYS. Some functions are actually named something starting with __ and the normal name is an alias. */ #if defined (__stub__doprnt) || defined (__stub____doprnt) choke me #else char (*f) () = _doprnt; #endif #ifdef __cplusplus } #endif int main () { return f != _doprnt; ; return 0; } _ACEOF rm -f conftest.$ac_objext conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_func__doprnt=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_cv_func__doprnt=no fi rm -f conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi echo "$as_me:$LINENO: result: $ac_cv_func__doprnt" >&5 echo "${ECHO_T}$ac_cv_func__doprnt" >&6 if test $ac_cv_func__doprnt = yes; then cat >>confdefs.h <<\_ACEOF #define HAVE_DOPRNT 1 _ACEOF fi fi done ac_config_files="$ac_config_files Makefile lib/Makefile tests/Makefile" cat >confcache <<\_ACEOF # This file is a shell script that caches the results of configure # tests run on this system so they can be shared between configure # scripts and configure runs, see configure's option --config-cache. # It is not useful on other systems. If it contains results you don't # want to keep, you may remove or edit it. # # config.status only pays attention to the cache file if you give it # the --recheck option to rerun configure. # # `ac_cv_env_foo' variables (set or unset) will be overridden when # loading this file, other *unset* `ac_cv_foo' will be assigned the # following values. _ACEOF # The following way of writing the cache mishandles newlines in values, # but we know of no workaround that is simple, portable, and efficient. # So, don't put newlines in cache variables' values. # Ultrix sh set writes to stderr and can't be redirected directly, # and sets the high bit in the cache file unless we assign to the vars. { (set) 2>&1 | case `(ac_space=' '; set | grep ac_space) 2>&1` in *ac_space=\ *) # `set' does not quote correctly, so add quotes (double-quote # substitution turns \\\\ into \\, and sed turns \\ into \). sed -n \ "s/'/'\\\\''/g; s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p" ;; *) # `set' quotes correctly as required by POSIX, so do not add quotes. sed -n \ "s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1=\\2/p" ;; esac; } | sed ' t clear : clear s/^\([^=]*\)=\(.*[{}].*\)$/test "${\1+set}" = set || &/ t end /^ac_cv_env/!s/^\([^=]*\)=\(.*\)$/\1=${\1=\2}/ : end' >>confcache if diff $cache_file confcache >/dev/null 2>&1; then :; else if test -w $cache_file; then test "x$cache_file" != "x/dev/null" && echo "updating cache $cache_file" cat confcache >$cache_file else echo "not updating unwritable cache $cache_file" fi fi rm -f confcache test "x$prefix" = xNONE && prefix=$ac_default_prefix # Let make expand exec_prefix. test "x$exec_prefix" = xNONE && exec_prefix='${prefix}' # VPATH may cause trouble with some makes, so we remove $(srcdir), # ${srcdir} and @srcdir@ from VPATH if srcdir is ".", strip leading and # trailing colons and then remove the whole line if VPATH becomes empty # (actually we leave an empty line to preserve line numbers). if test "x$srcdir" = x.; then ac_vpsub='/^[ ]*VPATH[ ]*=/{ s/:*\$(srcdir):*/:/; s/:*\${srcdir}:*/:/; s/:*@srcdir@:*/:/; s/^\([^=]*=[ ]*\):*/\1/; s/:*$//; s/^[^=]*=[ ]*$//; }' fi DEFS=-DHAVE_CONFIG_H ac_libobjs= ac_ltlibobjs= for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue # 1. Remove the extension, and $U if already installed. ac_i=`echo "$ac_i" | sed 's/\$U\././;s/\.o$//;s/\.obj$//'` # 2. Add them. ac_libobjs="$ac_libobjs $ac_i\$U.$ac_objext" ac_ltlibobjs="$ac_ltlibobjs $ac_i"'$U.lo' done LIBOBJS=$ac_libobjs LTLIBOBJS=$ac_ltlibobjs if test -z "${AMDEP_TRUE}" && test -z "${AMDEP_FALSE}"; then { { echo "$as_me:$LINENO: error: conditional \"AMDEP\" was never defined. Usually this means the macro was only invoked conditionally." 
>&5 echo "$as_me: error: conditional \"AMDEP\" was never defined. Usually this means the macro was only invoked conditionally." >&2;} { (exit 1); exit 1; }; } fi if test -z "${am__fastdepCC_TRUE}" && test -z "${am__fastdepCC_FALSE}"; then { { echo "$as_me:$LINENO: error: conditional \"am__fastdepCC\" was never defined. Usually this means the macro was only invoked conditionally." >&5 echo "$as_me: error: conditional \"am__fastdepCC\" was never defined. Usually this means the macro was only invoked conditionally." >&2;} { (exit 1); exit 1; }; } fi : ${CONFIG_STATUS=./config.status} ac_clean_files_save=$ac_clean_files ac_clean_files="$ac_clean_files $CONFIG_STATUS" { echo "$as_me:$LINENO: creating $CONFIG_STATUS" >&5 echo "$as_me: creating $CONFIG_STATUS" >&6;} cat >$CONFIG_STATUS <<_ACEOF #! $SHELL # Generated by $as_me. # Run this file to recreate the current configuration. # Compiler output produced by configure, useful for debugging # configure, is in config.log if it exists. debug=false ac_cs_recheck=false ac_cs_silent=false SHELL=\${CONFIG_SHELL-$SHELL} _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF ## --------------------- ## ## M4sh Initialization. ## ## --------------------- ## # Be Bourne compatible if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then emulate sh NULLCMD=: # Zsh 3.x and 4.x performs word splitting on ${1+"$@"}, which # is contrary to our usage. Disable this feature. alias -g '${1+"$@"}'='"$@"' elif test -n "${BASH_VERSION+set}" && (set -o posix) >/dev/null 2>&1; then set -o posix fi DUALCASE=1; export DUALCASE # for MKS sh # Support unset when possible. if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then as_unset=unset else as_unset=false fi # Work around bugs in pre-3.0 UWIN ksh. $as_unset ENV MAIL MAILPATH PS1='$ ' PS2='> ' PS4='+ ' # NLS nuisances. for as_var in \ LANG LANGUAGE LC_ADDRESS LC_ALL LC_COLLATE LC_CTYPE LC_IDENTIFICATION \ LC_MEASUREMENT LC_MESSAGES LC_MONETARY LC_NAME LC_NUMERIC LC_PAPER \ LC_TELEPHONE LC_TIME do if (set +x; test -z "`(eval $as_var=C; export $as_var) 2>&1`"); then eval $as_var=C; export $as_var else $as_unset $as_var fi done # Required to use basename. if expr a : '\(a\)' >/dev/null 2>&1; then as_expr=expr else as_expr=false fi if (basename /) >/dev/null 2>&1 && test "X`basename / 2>&1`" = "X/"; then as_basename=basename else as_basename=false fi # Name of the executable. as_me=`$as_basename "$0" || $as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ X"$0" : 'X\(//\)$' \| \ X"$0" : 'X\(/\)$' \| \ . : '\(.\)' 2>/dev/null || echo X/"$0" | sed '/^.*\/\([^/][^/]*\)\/*$/{ s//\1/; q; } /^X\/\(\/\/\)$/{ s//\1/; q; } /^X\/\(\/\).*/{ s//\1/; q; } s/.*/./; q'` # PATH needs CR, and LINENO needs CR and PATH. # Avoid depending upon Character Ranges. as_cr_letters='abcdefghijklmnopqrstuvwxyz' as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' as_cr_Letters=$as_cr_letters$as_cr_LETTERS as_cr_digits='0123456789' as_cr_alnum=$as_cr_Letters$as_cr_digits # The user is always right. if test "${PATH_SEPARATOR+set}" != set; then echo "#! /bin/sh" >conf$$.sh echo "exit 0" >>conf$$.sh chmod +x conf$$.sh if (PATH="/nonexistent;."; conf$$.sh) >/dev/null 2>&1; then PATH_SEPARATOR=';' else PATH_SEPARATOR=: fi rm -f conf$$.sh fi as_lineno_1=$LINENO as_lineno_2=$LINENO as_lineno_3=`(expr $as_lineno_1 + 1) 2>/dev/null` test "x$as_lineno_1" != "x$as_lineno_2" && test "x$as_lineno_3" = "x$as_lineno_2" || { # Find who we are. Look in the path if we contain no path at all # relative or not. 
case $0 in *[\\/]* ) as_myself=$0 ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break done ;; esac # We did not find ourselves, most probably we were run as `sh COMMAND' # in which case we are not to be found in the path. if test "x$as_myself" = x; then as_myself=$0 fi if test ! -f "$as_myself"; then { { echo "$as_me:$LINENO: error: cannot find myself; rerun with an absolute path" >&5 echo "$as_me: error: cannot find myself; rerun with an absolute path" >&2;} { (exit 1); exit 1; }; } fi case $CONFIG_SHELL in '') as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for as_base in sh bash ksh sh5; do case $as_dir in /*) if ("$as_dir/$as_base" -c ' as_lineno_1=$LINENO as_lineno_2=$LINENO as_lineno_3=`(expr $as_lineno_1 + 1) 2>/dev/null` test "x$as_lineno_1" != "x$as_lineno_2" && test "x$as_lineno_3" = "x$as_lineno_2" ') 2>/dev/null; then $as_unset BASH_ENV || test "${BASH_ENV+set}" != set || { BASH_ENV=; export BASH_ENV; } $as_unset ENV || test "${ENV+set}" != set || { ENV=; export ENV; } CONFIG_SHELL=$as_dir/$as_base export CONFIG_SHELL exec "$CONFIG_SHELL" "$0" ${1+"$@"} fi;; esac done done ;; esac # Create $as_me.lineno as a copy of $as_myself, but with $LINENO # uniformly replaced by the line number. The first 'sed' inserts a # line-number line before each line; the second 'sed' does the real # work. The second script uses 'N' to pair each line-number line # with the numbered line, and appends trailing '-' during # substitution so that $LINENO is not a special case at line end. # (Raja R Harinath suggested sed '=', and Paul Eggert wrote the # second 'sed' script. Blame Lee E. McMahon for sed's syntax. :-) sed '=' <$as_myself | sed ' N s,$,-, : loop s,^\(['$as_cr_digits']*\)\(.*\)[$]LINENO\([^'$as_cr_alnum'_]\),\1\2\1\3, t loop s,-$,, s,^['$as_cr_digits']*\n,, ' >$as_me.lineno && chmod +x $as_me.lineno || { { echo "$as_me:$LINENO: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&5 echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2;} { (exit 1); exit 1; }; } # Don't try to exec as it changes $[0], causing all sort of problems # (the dirname of $[0] is not the place where we might find the # original and so on. Autoconf is especially sensible to this). . ./$as_me.lineno # Exit status is that of the last command. exit } case `echo "testing\c"; echo 1,2,3`,`echo -n testing; echo 1,2,3` in *c*,-n*) ECHO_N= ECHO_C=' ' ECHO_T=' ' ;; *c*,* ) ECHO_N=-n ECHO_C= ECHO_T= ;; *) ECHO_N= ECHO_C='\c' ECHO_T= ;; esac if expr a : '\(a\)' >/dev/null 2>&1; then as_expr=expr else as_expr=false fi rm -f conf$$ conf$$.exe conf$$.file echo >conf$$.file if ln -s conf$$.file conf$$ 2>/dev/null; then # We could just check for DJGPP; but this test a) works b) is more generic # and c) will remain valid once DJGPP supports symlinks (DJGPP 2.04). if test -f conf$$.exe; then # Don't use ln at all; we don't have any links as_ln_s='cp -p' else as_ln_s='ln -s' fi elif ln conf$$.file conf$$ 2>/dev/null; then as_ln_s=ln else as_ln_s='cp -p' fi rm -f conf$$ conf$$.exe conf$$.file if mkdir -p . 2>/dev/null; then as_mkdir_p=: else test -d ./-p && rmdir ./-p as_mkdir_p=false fi as_executable_p="test -f" # Sed expression to map a string onto a valid CPP name. 
as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" # Sed expression to map a string onto a valid variable name. as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" # IFS # We need space, tab and new line, in precisely that order. as_nl=' ' IFS=" $as_nl" # CDPATH. $as_unset CDPATH exec 6>&1 # Open the log real soon, to keep \$[0] and so on meaningful, and to # report actual input values of CONFIG_FILES etc. instead of their # values after options handling. Logging --version etc. is OK. exec 5>>config.log { echo sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX ## Running $as_me. ## _ASBOX } >&5 cat >&5 <<_CSEOF This file was extended by syck $as_me 0.54, which was generated by GNU Autoconf 2.59. Invocation command line was CONFIG_FILES = $CONFIG_FILES CONFIG_HEADERS = $CONFIG_HEADERS CONFIG_LINKS = $CONFIG_LINKS CONFIG_COMMANDS = $CONFIG_COMMANDS $ $0 $@ _CSEOF echo "on `(hostname || uname -n) 2>/dev/null | sed 1q`" >&5 echo >&5 _ACEOF # Files that config.status was made for. if test -n "$ac_config_files"; then echo "config_files=\"$ac_config_files\"" >>$CONFIG_STATUS fi if test -n "$ac_config_headers"; then echo "config_headers=\"$ac_config_headers\"" >>$CONFIG_STATUS fi if test -n "$ac_config_links"; then echo "config_links=\"$ac_config_links\"" >>$CONFIG_STATUS fi if test -n "$ac_config_commands"; then echo "config_commands=\"$ac_config_commands\"" >>$CONFIG_STATUS fi cat >>$CONFIG_STATUS <<\_ACEOF ac_cs_usage="\ \`$as_me' instantiates files from templates according to the current configuration. Usage: $0 [OPTIONS] [FILE]... -h, --help print this help, then exit -V, --version print version number, then exit -q, --quiet do not print progress messages -d, --debug don't remove temporary files --recheck update $as_me by reconfiguring in the same conditions --file=FILE[:TEMPLATE] instantiate the configuration file FILE --header=FILE[:TEMPLATE] instantiate the configuration header FILE Configuration files: $config_files Configuration headers: $config_headers Configuration commands: $config_commands Report bugs to ." _ACEOF cat >>$CONFIG_STATUS <<_ACEOF ac_cs_version="\\ syck config.status 0.54 configured by $0, generated by GNU Autoconf 2.59, with options \\"`echo "$ac_configure_args" | sed 's/[\\""\`\$]/\\\\&/g'`\\" Copyright (C) 2003 Free Software Foundation, Inc. This config.status script is free software; the Free Software Foundation gives unlimited permission to copy, distribute and modify it." srcdir=$srcdir INSTALL="$INSTALL" _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF # If no file are specified by the user, then we need to provide default # value. By we need to know if files were specified by the user. ac_need_defaults=: while test $# != 0 do case $1 in --*=*) ac_option=`expr "x$1" : 'x\([^=]*\)='` ac_optarg=`expr "x$1" : 'x[^=]*=\(.*\)'` ac_shift=: ;; -*) ac_option=$1 ac_optarg=$2 ac_shift=shift ;; *) # This is not an option, so the user has probably given explicit # arguments. ac_option=$1 ac_need_defaults=false;; esac case $ac_option in # Handling of the options. _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF -recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r) ac_cs_recheck=: ;; --version | --vers* | -V ) echo "$ac_cs_version"; exit 0 ;; --he | --h) # Conflict between --help and --header { { echo "$as_me:$LINENO: error: ambiguous option: $1 Try \`$0 --help' for more information." >&5 echo "$as_me: error: ambiguous option: $1 Try \`$0 --help' for more information." 
>&2;} { (exit 1); exit 1; }; };; --help | --hel | -h ) echo "$ac_cs_usage"; exit 0 ;; --debug | --d* | -d ) debug=: ;; --file | --fil | --fi | --f ) $ac_shift CONFIG_FILES="$CONFIG_FILES $ac_optarg" ac_need_defaults=false;; --header | --heade | --head | --hea ) $ac_shift CONFIG_HEADERS="$CONFIG_HEADERS $ac_optarg" ac_need_defaults=false;; -q | -quiet | --quiet | --quie | --qui | --qu | --q \ | -silent | --silent | --silen | --sile | --sil | --si | --s) ac_cs_silent=: ;; # This is an error. -*) { { echo "$as_me:$LINENO: error: unrecognized option: $1 Try \`$0 --help' for more information." >&5 echo "$as_me: error: unrecognized option: $1 Try \`$0 --help' for more information." >&2;} { (exit 1); exit 1; }; } ;; *) ac_config_targets="$ac_config_targets $1" ;; esac shift done ac_configure_extra_args= if $ac_cs_silent; then exec 6>/dev/null ac_configure_extra_args="$ac_configure_extra_args --silent" fi _ACEOF cat >>$CONFIG_STATUS <<_ACEOF if \$ac_cs_recheck; then echo "running $SHELL $0 " $ac_configure_args \$ac_configure_extra_args " --no-create --no-recursion" >&6 exec $SHELL $0 $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion fi _ACEOF cat >>$CONFIG_STATUS <<_ACEOF # # INIT-COMMANDS section. # AMDEP_TRUE="$AMDEP_TRUE" ac_aux_dir="$ac_aux_dir" _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF for ac_config_target in $ac_config_targets do case "$ac_config_target" in # Handling of arguments. "Makefile" ) CONFIG_FILES="$CONFIG_FILES Makefile" ;; "lib/Makefile" ) CONFIG_FILES="$CONFIG_FILES lib/Makefile" ;; "tests/Makefile" ) CONFIG_FILES="$CONFIG_FILES tests/Makefile" ;; "depfiles" ) CONFIG_COMMANDS="$CONFIG_COMMANDS depfiles" ;; "config.h" ) CONFIG_HEADERS="$CONFIG_HEADERS config.h" ;; *) { { echo "$as_me:$LINENO: error: invalid argument: $ac_config_target" >&5 echo "$as_me: error: invalid argument: $ac_config_target" >&2;} { (exit 1); exit 1; }; };; esac done # If the user did not use the arguments to specify the items to instantiate, # then the envvar interface is used. Set only those that are not. # We use the long form for the default assignment because of an extremely # bizarre bug on SunOS 4.1.3. if $ac_need_defaults; then test "${CONFIG_FILES+set}" = set || CONFIG_FILES=$config_files test "${CONFIG_HEADERS+set}" = set || CONFIG_HEADERS=$config_headers test "${CONFIG_COMMANDS+set}" = set || CONFIG_COMMANDS=$config_commands fi # Have a temporary directory for convenience. Make it in the build tree # simply because there is no reason to put it here, and in addition, # creating and moving files from /tmp can sometimes cause problems. # Create a temporary directory, and hook for its removal unless debugging. $debug || { trap 'exit_status=$?; rm -rf $tmp && exit $exit_status' 0 trap '{ (exit 1); exit 1; }' 1 2 13 15 } # Create a (secure) tmp directory for tmp files. { tmp=`(umask 077 && mktemp -d -q "./confstatXXXXXX") 2>/dev/null` && test -n "$tmp" && test -d "$tmp" } || { tmp=./confstat$$-$RANDOM (umask 077 && mkdir $tmp) } || { echo "$me: cannot create a temporary directory in ." >&2 { (exit 1); exit 1; } } _ACEOF cat >>$CONFIG_STATUS <<_ACEOF # # CONFIG_FILES section. # # No need to generate the scripts if there are no CONFIG_FILES. # This happens for instance when ./config.status config.h if test -n "\$CONFIG_FILES"; then # Protect against being on the right side of a sed subst in config.status. 
sed 's/,@/@@/; s/@,/@@/; s/,;t t\$/@;t t/; /@;t t\$/s/[\\\\&,]/\\\\&/g; s/@@/,@/; s/@@/@,/; s/@;t t\$/,;t t/' >\$tmp/subs.sed <<\\CEOF s,@SHELL@,$SHELL,;t t s,@PATH_SEPARATOR@,$PATH_SEPARATOR,;t t s,@PACKAGE_NAME@,$PACKAGE_NAME,;t t s,@PACKAGE_TARNAME@,$PACKAGE_TARNAME,;t t s,@PACKAGE_VERSION@,$PACKAGE_VERSION,;t t s,@PACKAGE_STRING@,$PACKAGE_STRING,;t t s,@PACKAGE_BUGREPORT@,$PACKAGE_BUGREPORT,;t t s,@exec_prefix@,$exec_prefix,;t t s,@prefix@,$prefix,;t t s,@program_transform_name@,$program_transform_name,;t t s,@bindir@,$bindir,;t t s,@sbindir@,$sbindir,;t t s,@libexecdir@,$libexecdir,;t t s,@datadir@,$datadir,;t t s,@sysconfdir@,$sysconfdir,;t t s,@sharedstatedir@,$sharedstatedir,;t t s,@localstatedir@,$localstatedir,;t t s,@libdir@,$libdir,;t t s,@includedir@,$includedir,;t t s,@oldincludedir@,$oldincludedir,;t t s,@infodir@,$infodir,;t t s,@mandir@,$mandir,;t t s,@build_alias@,$build_alias,;t t s,@host_alias@,$host_alias,;t t s,@target_alias@,$target_alias,;t t s,@DEFS@,$DEFS,;t t s,@ECHO_C@,$ECHO_C,;t t s,@ECHO_N@,$ECHO_N,;t t s,@ECHO_T@,$ECHO_T,;t t s,@LIBS@,$LIBS,;t t s,@INSTALL_PROGRAM@,$INSTALL_PROGRAM,;t t s,@INSTALL_SCRIPT@,$INSTALL_SCRIPT,;t t s,@INSTALL_DATA@,$INSTALL_DATA,;t t s,@CYGPATH_W@,$CYGPATH_W,;t t s,@PACKAGE@,$PACKAGE,;t t s,@VERSION@,$VERSION,;t t s,@ACLOCAL@,$ACLOCAL,;t t s,@AUTOCONF@,$AUTOCONF,;t t s,@AUTOMAKE@,$AUTOMAKE,;t t s,@AUTOHEADER@,$AUTOHEADER,;t t s,@MAKEINFO@,$MAKEINFO,;t t s,@install_sh@,$install_sh,;t t s,@STRIP@,$STRIP,;t t s,@ac_ct_STRIP@,$ac_ct_STRIP,;t t s,@INSTALL_STRIP_PROGRAM@,$INSTALL_STRIP_PROGRAM,;t t s,@mkdir_p@,$mkdir_p,;t t s,@AWK@,$AWK,;t t s,@SET_MAKE@,$SET_MAKE,;t t s,@am__leading_dot@,$am__leading_dot,;t t s,@AMTAR@,$AMTAR,;t t s,@am__tar@,$am__tar,;t t s,@am__untar@,$am__untar,;t t s,@LN_S@,$LN_S,;t t s,@RANLIB@,$RANLIB,;t t s,@ac_ct_RANLIB@,$ac_ct_RANLIB,;t t s,@YACC@,$YACC,;t t s,@LEX@,$LEX,;t t s,@CC@,$CC,;t t s,@CFLAGS@,$CFLAGS,;t t s,@LDFLAGS@,$LDFLAGS,;t t s,@CPPFLAGS@,$CPPFLAGS,;t t s,@ac_ct_CC@,$ac_ct_CC,;t t s,@EXEEXT@,$EXEEXT,;t t s,@OBJEXT@,$OBJEXT,;t t s,@DEPDIR@,$DEPDIR,;t t s,@am__include@,$am__include,;t t s,@am__quote@,$am__quote,;t t s,@AMDEP_TRUE@,$AMDEP_TRUE,;t t s,@AMDEP_FALSE@,$AMDEP_FALSE,;t t s,@AMDEPBACKSLASH@,$AMDEPBACKSLASH,;t t s,@CCDEPMODE@,$CCDEPMODE,;t t s,@am__fastdepCC_TRUE@,$am__fastdepCC_TRUE,;t t s,@am__fastdepCC_FALSE@,$am__fastdepCC_FALSE,;t t s,@LEXLIB@,$LEXLIB,;t t s,@LEX_OUTPUT_ROOT@,$LEX_OUTPUT_ROOT,;t t s,@CPP@,$CPP,;t t s,@EGREP@,$EGREP,;t t s,@LIBOBJS@,$LIBOBJS,;t t s,@LTLIBOBJS@,$LTLIBOBJS,;t t CEOF _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF # Split the substitutions into bite-sized pieces for seds with # small command number limits, like on Digital OSF/1 and HP-UX. ac_max_sed_lines=48 ac_sed_frag=1 # Number of current file. ac_beg=1 # First line for current file. ac_end=$ac_max_sed_lines # Line after last line for current file. ac_more_lines=: ac_sed_cmds= while $ac_more_lines; do if test $ac_beg -gt 1; then sed "1,${ac_beg}d; ${ac_end}q" $tmp/subs.sed >$tmp/subs.frag else sed "${ac_end}q" $tmp/subs.sed >$tmp/subs.frag fi if test ! -s $tmp/subs.frag; then ac_more_lines=false else # The purpose of the label and of the branching condition is to # speed up the sed processing (if there are no `@' at all, there # is no need to browse any of the substitutions). # These are the two extra sed commands mentioned above. 
(echo ':t /@[a-zA-Z_][a-zA-Z_0-9]*@/!b' && cat $tmp/subs.frag) >$tmp/subs-$ac_sed_frag.sed if test -z "$ac_sed_cmds"; then ac_sed_cmds="sed -f $tmp/subs-$ac_sed_frag.sed" else ac_sed_cmds="$ac_sed_cmds | sed -f $tmp/subs-$ac_sed_frag.sed" fi ac_sed_frag=`expr $ac_sed_frag + 1` ac_beg=$ac_end ac_end=`expr $ac_end + $ac_max_sed_lines` fi done if test -z "$ac_sed_cmds"; then ac_sed_cmds=cat fi fi # test -n "$CONFIG_FILES" _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF for ac_file in : $CONFIG_FILES; do test "x$ac_file" = x: && continue # Support "outfile[:infile[:infile...]]", defaulting infile="outfile.in". case $ac_file in - | *:- | *:-:* ) # input from stdin cat >$tmp/stdin ac_file_in=`echo "$ac_file" | sed 's,[^:]*:,,'` ac_file=`echo "$ac_file" | sed 's,:.*,,'` ;; *:* ) ac_file_in=`echo "$ac_file" | sed 's,[^:]*:,,'` ac_file=`echo "$ac_file" | sed 's,:.*,,'` ;; * ) ac_file_in=$ac_file.in ;; esac # Compute @srcdir@, @top_srcdir@, and @INSTALL@ for subdirectories. ac_dir=`(dirname "$ac_file") 2>/dev/null || $as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$ac_file" : 'X\(//\)[^/]' \| \ X"$ac_file" : 'X\(//\)$' \| \ X"$ac_file" : 'X\(/\)' \| \ . : '\(.\)' 2>/dev/null || echo X"$ac_file" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } /^X\(\/\/\)[^/].*/{ s//\1/; q; } /^X\(\/\/\)$/{ s//\1/; q; } /^X\(\/\).*/{ s//\1/; q; } s/.*/./; q'` { if $as_mkdir_p; then mkdir -p "$ac_dir" else as_dir="$ac_dir" as_dirs= while test ! -d "$as_dir"; do as_dirs="$as_dir $as_dirs" as_dir=`(dirname "$as_dir") 2>/dev/null || $as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$as_dir" : 'X\(//\)[^/]' \| \ X"$as_dir" : 'X\(//\)$' \| \ X"$as_dir" : 'X\(/\)' \| \ . : '\(.\)' 2>/dev/null || echo X"$as_dir" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } /^X\(\/\/\)[^/].*/{ s//\1/; q; } /^X\(\/\/\)$/{ s//\1/; q; } /^X\(\/\).*/{ s//\1/; q; } s/.*/./; q'` done test ! -n "$as_dirs" || mkdir $as_dirs fi || { { echo "$as_me:$LINENO: error: cannot create directory \"$ac_dir\"" >&5 echo "$as_me: error: cannot create directory \"$ac_dir\"" >&2;} { (exit 1); exit 1; }; }; } ac_builddir=. if test "$ac_dir" != .; then ac_dir_suffix=/`echo "$ac_dir" | sed 's,^\.[\\/],,'` # A "../" for each directory in $ac_dir_suffix. ac_top_builddir=`echo "$ac_dir_suffix" | sed 's,/[^\\/]*,../,g'` else ac_dir_suffix= ac_top_builddir= fi case $srcdir in .) # No --srcdir option. We are building in place. ac_srcdir=. if test -z "$ac_top_builddir"; then ac_top_srcdir=. else ac_top_srcdir=`echo $ac_top_builddir | sed 's,/$,,'` fi ;; [\\/]* | ?:[\\/]* ) # Absolute path. ac_srcdir=$srcdir$ac_dir_suffix; ac_top_srcdir=$srcdir ;; *) # Relative path. ac_srcdir=$ac_top_builddir$srcdir$ac_dir_suffix ac_top_srcdir=$ac_top_builddir$srcdir ;; esac # Do not use `cd foo && pwd` to compute absolute paths, because # the directories may not exist. case `pwd` in .) ac_abs_builddir="$ac_dir";; *) case "$ac_dir" in .) ac_abs_builddir=`pwd`;; [\\/]* | ?:[\\/]* ) ac_abs_builddir="$ac_dir";; *) ac_abs_builddir=`pwd`/"$ac_dir";; esac;; esac case $ac_abs_builddir in .) ac_abs_top_builddir=${ac_top_builddir}.;; *) case ${ac_top_builddir}. in .) ac_abs_top_builddir=$ac_abs_builddir;; [\\/]* | ?:[\\/]* ) ac_abs_top_builddir=${ac_top_builddir}.;; *) ac_abs_top_builddir=$ac_abs_builddir/${ac_top_builddir}.;; esac;; esac case $ac_abs_builddir in .) ac_abs_srcdir=$ac_srcdir;; *) case $ac_srcdir in .) 
ac_abs_srcdir=$ac_abs_builddir;; [\\/]* | ?:[\\/]* ) ac_abs_srcdir=$ac_srcdir;; *) ac_abs_srcdir=$ac_abs_builddir/$ac_srcdir;; esac;; esac case $ac_abs_builddir in .) ac_abs_top_srcdir=$ac_top_srcdir;; *) case $ac_top_srcdir in .) ac_abs_top_srcdir=$ac_abs_builddir;; [\\/]* | ?:[\\/]* ) ac_abs_top_srcdir=$ac_top_srcdir;; *) ac_abs_top_srcdir=$ac_abs_builddir/$ac_top_srcdir;; esac;; esac case $INSTALL in [\\/$]* | ?:[\\/]* ) ac_INSTALL=$INSTALL ;; *) ac_INSTALL=$ac_top_builddir$INSTALL ;; esac if test x"$ac_file" != x-; then { echo "$as_me:$LINENO: creating $ac_file" >&5 echo "$as_me: creating $ac_file" >&6;} rm -f "$ac_file" fi # Let's still pretend it is `configure' which instantiates (i.e., don't # use $as_me), people would be surprised to read: # /* config.h. Generated by config.status. */ if test x"$ac_file" = x-; then configure_input= else configure_input="$ac_file. " fi configure_input=$configure_input"Generated from `echo $ac_file_in | sed 's,.*/,,'` by configure." # First look for the input files in the build tree, otherwise in the # src tree. ac_file_inputs=`IFS=: for f in $ac_file_in; do case $f in -) echo $tmp/stdin ;; [\\/$]*) # Absolute (can't be DOS-style, as IFS=:) test -f "$f" || { { echo "$as_me:$LINENO: error: cannot find input file: $f" >&5 echo "$as_me: error: cannot find input file: $f" >&2;} { (exit 1); exit 1; }; } echo "$f";; *) # Relative if test -f "$f"; then # Build tree echo "$f" elif test -f "$srcdir/$f"; then # Source tree echo "$srcdir/$f" else # /dev/null tree { { echo "$as_me:$LINENO: error: cannot find input file: $f" >&5 echo "$as_me: error: cannot find input file: $f" >&2;} { (exit 1); exit 1; }; } fi;; esac done` || { (exit 1); exit 1; } _ACEOF cat >>$CONFIG_STATUS <<_ACEOF sed "$ac_vpsub $extrasub _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF :t /@[a-zA-Z_][a-zA-Z_0-9]*@/!b s,@configure_input@,$configure_input,;t t s,@srcdir@,$ac_srcdir,;t t s,@abs_srcdir@,$ac_abs_srcdir,;t t s,@top_srcdir@,$ac_top_srcdir,;t t s,@abs_top_srcdir@,$ac_abs_top_srcdir,;t t s,@builddir@,$ac_builddir,;t t s,@abs_builddir@,$ac_abs_builddir,;t t s,@top_builddir@,$ac_top_builddir,;t t s,@abs_top_builddir@,$ac_abs_top_builddir,;t t s,@INSTALL@,$ac_INSTALL,;t t " $ac_file_inputs | (eval "$ac_sed_cmds") >$tmp/out rm -f $tmp/stdin if test x"$ac_file" != x-; then mv $tmp/out $ac_file else cat $tmp/out rm -f $tmp/out fi done _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF # # CONFIG_HEADER section. # # These sed commands are passed to sed as "A NAME B NAME C VALUE D", where # NAME is the cpp macro being defined and VALUE is the value it is being given. # # ac_d sets the value in "#define NAME VALUE" lines. ac_dA='s,^\([ ]*\)#\([ ]*define[ ][ ]*\)' ac_dB='[ ].*$,\1#\2' ac_dC=' ' ac_dD=',;t' # ac_u turns "#undef NAME" without trailing blanks into "#define NAME VALUE". ac_uA='s,^\([ ]*\)#\([ ]*\)undef\([ ][ ]*\)' ac_uB='$,\1#\2define\3' ac_uC=' ' ac_uD=',;t' for ac_file in : $CONFIG_HEADERS; do test "x$ac_file" = x: && continue # Support "outfile[:infile[:infile...]]", defaulting infile="outfile.in". case $ac_file in - | *:- | *:-:* ) # input from stdin cat >$tmp/stdin ac_file_in=`echo "$ac_file" | sed 's,[^:]*:,,'` ac_file=`echo "$ac_file" | sed 's,:.*,,'` ;; *:* ) ac_file_in=`echo "$ac_file" | sed 's,[^:]*:,,'` ac_file=`echo "$ac_file" | sed 's,:.*,,'` ;; * ) ac_file_in=$ac_file.in ;; esac test x"$ac_file" != x- && { echo "$as_me:$LINENO: creating $ac_file" >&5 echo "$as_me: creating $ac_file" >&6;} # First look for the input files in the build tree, otherwise in the # src tree. 
ac_file_inputs=`IFS=: for f in $ac_file_in; do case $f in -) echo $tmp/stdin ;; [\\/$]*) # Absolute (can't be DOS-style, as IFS=:) test -f "$f" || { { echo "$as_me:$LINENO: error: cannot find input file: $f" >&5 echo "$as_me: error: cannot find input file: $f" >&2;} { (exit 1); exit 1; }; } # Do quote $f, to prevent DOS paths from being IFS'd. echo "$f";; *) # Relative if test -f "$f"; then # Build tree echo "$f" elif test -f "$srcdir/$f"; then # Source tree echo "$srcdir/$f" else # /dev/null tree { { echo "$as_me:$LINENO: error: cannot find input file: $f" >&5 echo "$as_me: error: cannot find input file: $f" >&2;} { (exit 1); exit 1; }; } fi;; esac done` || { (exit 1); exit 1; } # Remove the trailing spaces. sed 's/[ ]*$//' $ac_file_inputs >$tmp/in _ACEOF # Transform confdefs.h into two sed scripts, `conftest.defines' and # `conftest.undefs', that substitutes the proper values into # config.h.in to produce config.h. The first handles `#define' # templates, and the second `#undef' templates. # And first: Protect against being on the right side of a sed subst in # config.status. Protect against being in an unquoted here document # in config.status. rm -f conftest.defines conftest.undefs # Using a here document instead of a string reduces the quoting nightmare. # Putting comments in sed scripts is not portable. # # `end' is used to avoid that the second main sed command (meant for # 0-ary CPP macros) applies to n-ary macro definitions. # See the Autoconf documentation for `clear'. cat >confdef2sed.sed <<\_ACEOF s/[\\&,]/\\&/g s,[\\$`],\\&,g t clear : clear s,^[ ]*#[ ]*define[ ][ ]*\([^ (][^ (]*\)\(([^)]*)\)[ ]*\(.*\)$,${ac_dA}\1${ac_dB}\1\2${ac_dC}\3${ac_dD},gp t end s,^[ ]*#[ ]*define[ ][ ]*\([^ ][^ ]*\)[ ]*\(.*\)$,${ac_dA}\1${ac_dB}\1${ac_dC}\2${ac_dD},gp : end _ACEOF # If some macros were called several times there might be several times # the same #defines, which is useless. Nevertheless, we may not want to # sort them, since we want the *last* AC-DEFINE to be honored. uniq confdefs.h | sed -n -f confdef2sed.sed >conftest.defines sed 's/ac_d/ac_u/g' conftest.defines >conftest.undefs rm -f confdef2sed.sed # This sed command replaces #undef with comments. This is necessary, for # example, in the case of _POSIX_SOURCE, which is predefined and required # on some systems where configure will not decide to define it. cat >>conftest.undefs <<\_ACEOF s,^[ ]*#[ ]*undef[ ][ ]*[a-zA-Z_][a-zA-Z_0-9]*,/* & */, _ACEOF # Break up conftest.defines because some shells have a limit on the size # of here documents, and old seds have small limits too (100 cmds). echo ' # Handle all the #define templates only if necessary.' >>$CONFIG_STATUS echo ' if grep "^[ ]*#[ ]*define" $tmp/in >/dev/null; then' >>$CONFIG_STATUS echo ' # If there are no defines, we may have an empty if/fi' >>$CONFIG_STATUS echo ' :' >>$CONFIG_STATUS rm -f conftest.tail while grep . conftest.defines >/dev/null do # Write a limited-size here document to $tmp/defines.sed. echo ' cat >$tmp/defines.sed <>$CONFIG_STATUS # Speed up: don't consider the non `#define' lines. echo '/^[ ]*#[ ]*define/!b' >>$CONFIG_STATUS # Work around the forget-to-reset-the-flag bug. 
echo 't clr' >>$CONFIG_STATUS echo ': clr' >>$CONFIG_STATUS sed ${ac_max_here_lines}q conftest.defines >>$CONFIG_STATUS echo 'CEOF sed -f $tmp/defines.sed $tmp/in >$tmp/out rm -f $tmp/in mv $tmp/out $tmp/in ' >>$CONFIG_STATUS sed 1,${ac_max_here_lines}d conftest.defines >conftest.tail rm -f conftest.defines mv conftest.tail conftest.defines done rm -f conftest.defines echo ' fi # grep' >>$CONFIG_STATUS echo >>$CONFIG_STATUS # Break up conftest.undefs because some shells have a limit on the size # of here documents, and old seds have small limits too (100 cmds). echo ' # Handle all the #undef templates' >>$CONFIG_STATUS rm -f conftest.tail while grep . conftest.undefs >/dev/null do # Write a limited-size here document to $tmp/undefs.sed. echo ' cat >$tmp/undefs.sed <>$CONFIG_STATUS # Speed up: don't consider the non `#undef' echo '/^[ ]*#[ ]*undef/!b' >>$CONFIG_STATUS # Work around the forget-to-reset-the-flag bug. echo 't clr' >>$CONFIG_STATUS echo ': clr' >>$CONFIG_STATUS sed ${ac_max_here_lines}q conftest.undefs >>$CONFIG_STATUS echo 'CEOF sed -f $tmp/undefs.sed $tmp/in >$tmp/out rm -f $tmp/in mv $tmp/out $tmp/in ' >>$CONFIG_STATUS sed 1,${ac_max_here_lines}d conftest.undefs >conftest.tail rm -f conftest.undefs mv conftest.tail conftest.undefs done rm -f conftest.undefs cat >>$CONFIG_STATUS <<\_ACEOF # Let's still pretend it is `configure' which instantiates (i.e., don't # use $as_me), people would be surprised to read: # /* config.h. Generated by config.status. */ if test x"$ac_file" = x-; then echo "/* Generated by configure. */" >$tmp/config.h else echo "/* $ac_file. Generated by configure. */" >$tmp/config.h fi cat $tmp/in >>$tmp/config.h rm -f $tmp/in if test x"$ac_file" != x-; then if diff $ac_file $tmp/config.h >/dev/null 2>&1; then { echo "$as_me:$LINENO: $ac_file is unchanged" >&5 echo "$as_me: $ac_file is unchanged" >&6;} else ac_dir=`(dirname "$ac_file") 2>/dev/null || $as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$ac_file" : 'X\(//\)[^/]' \| \ X"$ac_file" : 'X\(//\)$' \| \ X"$ac_file" : 'X\(/\)' \| \ . : '\(.\)' 2>/dev/null || echo X"$ac_file" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } /^X\(\/\/\)[^/].*/{ s//\1/; q; } /^X\(\/\/\)$/{ s//\1/; q; } /^X\(\/\).*/{ s//\1/; q; } s/.*/./; q'` { if $as_mkdir_p; then mkdir -p "$ac_dir" else as_dir="$ac_dir" as_dirs= while test ! -d "$as_dir"; do as_dirs="$as_dir $as_dirs" as_dir=`(dirname "$as_dir") 2>/dev/null || $as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$as_dir" : 'X\(//\)[^/]' \| \ X"$as_dir" : 'X\(//\)$' \| \ X"$as_dir" : 'X\(/\)' \| \ . : '\(.\)' 2>/dev/null || echo X"$as_dir" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } /^X\(\/\/\)[^/].*/{ s//\1/; q; } /^X\(\/\/\)$/{ s//\1/; q; } /^X\(\/\).*/{ s//\1/; q; } s/.*/./; q'` done test ! -n "$as_dirs" || mkdir $as_dirs fi || { { echo "$as_me:$LINENO: error: cannot create directory \"$ac_dir\"" >&5 echo "$as_me: error: cannot create directory \"$ac_dir\"" >&2;} { (exit 1); exit 1; }; }; } rm -f $ac_file mv $tmp/config.h $ac_file fi else cat $tmp/config.h rm -f $tmp/config.h fi # Compute $ac_file's index in $config_headers. _am_stamp_count=1 for _am_header in $config_headers :; do case $_am_header in $ac_file | $ac_file:* ) break ;; * ) _am_stamp_count=`expr $_am_stamp_count + 1` ;; esac done echo "timestamp for $ac_file" >`(dirname $ac_file) 2>/dev/null || $as_expr X$ac_file : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X$ac_file : 'X\(//\)[^/]' \| \ X$ac_file : 'X\(//\)$' \| \ X$ac_file : 'X\(/\)' \| \ . 
: '\(.\)' 2>/dev/null || echo X$ac_file | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } /^X\(\/\/\)[^/].*/{ s//\1/; q; } /^X\(\/\/\)$/{ s//\1/; q; } /^X\(\/\).*/{ s//\1/; q; } s/.*/./; q'`/stamp-h$_am_stamp_count done _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF # # CONFIG_COMMANDS section. # for ac_file in : $CONFIG_COMMANDS; do test "x$ac_file" = x: && continue ac_dest=`echo "$ac_file" | sed 's,:.*,,'` ac_source=`echo "$ac_file" | sed 's,[^:]*:,,'` ac_dir=`(dirname "$ac_dest") 2>/dev/null || $as_expr X"$ac_dest" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$ac_dest" : 'X\(//\)[^/]' \| \ X"$ac_dest" : 'X\(//\)$' \| \ X"$ac_dest" : 'X\(/\)' \| \ . : '\(.\)' 2>/dev/null || echo X"$ac_dest" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } /^X\(\/\/\)[^/].*/{ s//\1/; q; } /^X\(\/\/\)$/{ s//\1/; q; } /^X\(\/\).*/{ s//\1/; q; } s/.*/./; q'` { if $as_mkdir_p; then mkdir -p "$ac_dir" else as_dir="$ac_dir" as_dirs= while test ! -d "$as_dir"; do as_dirs="$as_dir $as_dirs" as_dir=`(dirname "$as_dir") 2>/dev/null || $as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$as_dir" : 'X\(//\)[^/]' \| \ X"$as_dir" : 'X\(//\)$' \| \ X"$as_dir" : 'X\(/\)' \| \ . : '\(.\)' 2>/dev/null || echo X"$as_dir" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } /^X\(\/\/\)[^/].*/{ s//\1/; q; } /^X\(\/\/\)$/{ s//\1/; q; } /^X\(\/\).*/{ s//\1/; q; } s/.*/./; q'` done test ! -n "$as_dirs" || mkdir $as_dirs fi || { { echo "$as_me:$LINENO: error: cannot create directory \"$ac_dir\"" >&5 echo "$as_me: error: cannot create directory \"$ac_dir\"" >&2;} { (exit 1); exit 1; }; }; } ac_builddir=. if test "$ac_dir" != .; then ac_dir_suffix=/`echo "$ac_dir" | sed 's,^\.[\\/],,'` # A "../" for each directory in $ac_dir_suffix. ac_top_builddir=`echo "$ac_dir_suffix" | sed 's,/[^\\/]*,../,g'` else ac_dir_suffix= ac_top_builddir= fi case $srcdir in .) # No --srcdir option. We are building in place. ac_srcdir=. if test -z "$ac_top_builddir"; then ac_top_srcdir=. else ac_top_srcdir=`echo $ac_top_builddir | sed 's,/$,,'` fi ;; [\\/]* | ?:[\\/]* ) # Absolute path. ac_srcdir=$srcdir$ac_dir_suffix; ac_top_srcdir=$srcdir ;; *) # Relative path. ac_srcdir=$ac_top_builddir$srcdir$ac_dir_suffix ac_top_srcdir=$ac_top_builddir$srcdir ;; esac # Do not use `cd foo && pwd` to compute absolute paths, because # the directories may not exist. case `pwd` in .) ac_abs_builddir="$ac_dir";; *) case "$ac_dir" in .) ac_abs_builddir=`pwd`;; [\\/]* | ?:[\\/]* ) ac_abs_builddir="$ac_dir";; *) ac_abs_builddir=`pwd`/"$ac_dir";; esac;; esac case $ac_abs_builddir in .) ac_abs_top_builddir=${ac_top_builddir}.;; *) case ${ac_top_builddir}. in .) ac_abs_top_builddir=$ac_abs_builddir;; [\\/]* | ?:[\\/]* ) ac_abs_top_builddir=${ac_top_builddir}.;; *) ac_abs_top_builddir=$ac_abs_builddir/${ac_top_builddir}.;; esac;; esac case $ac_abs_builddir in .) ac_abs_srcdir=$ac_srcdir;; *) case $ac_srcdir in .) ac_abs_srcdir=$ac_abs_builddir;; [\\/]* | ?:[\\/]* ) ac_abs_srcdir=$ac_srcdir;; *) ac_abs_srcdir=$ac_abs_builddir/$ac_srcdir;; esac;; esac case $ac_abs_builddir in .) ac_abs_top_srcdir=$ac_top_srcdir;; *) case $ac_top_srcdir in .) ac_abs_top_srcdir=$ac_abs_builddir;; [\\/]* | ?:[\\/]* ) ac_abs_top_srcdir=$ac_top_srcdir;; *) ac_abs_top_srcdir=$ac_abs_builddir/$ac_top_srcdir;; esac;; esac { echo "$as_me:$LINENO: executing $ac_dest commands" >&5 echo "$as_me: executing $ac_dest commands" >&6;} case $ac_dest in depfiles ) test x"$AMDEP_TRUE" != x"" || for mf in $CONFIG_FILES; do # Strip MF so we end up with the name of the file. 
mf=`echo "$mf" | sed -e 's/:.*$//'` # Check whether this is an Automake generated Makefile or not. # We used to match only the files named `Makefile.in', but # some people rename them; so instead we look at the file content. # Grep'ing the first line is not enough: some people post-process # each Makefile.in and add a new line on top of each file to say so. # So let's grep whole file. if grep '^#.*generated by automake' $mf > /dev/null 2>&1; then dirpart=`(dirname "$mf") 2>/dev/null || $as_expr X"$mf" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$mf" : 'X\(//\)[^/]' \| \ X"$mf" : 'X\(//\)$' \| \ X"$mf" : 'X\(/\)' \| \ . : '\(.\)' 2>/dev/null || echo X"$mf" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } /^X\(\/\/\)[^/].*/{ s//\1/; q; } /^X\(\/\/\)$/{ s//\1/; q; } /^X\(\/\).*/{ s//\1/; q; } s/.*/./; q'` else continue fi # Extract the definition of DEPDIR, am__include, and am__quote # from the Makefile without running `make'. DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"` test -z "$DEPDIR" && continue am__include=`sed -n 's/^am__include = //p' < "$mf"` test -z "am__include" && continue am__quote=`sed -n 's/^am__quote = //p' < "$mf"` # When using ansi2knr, U may be empty or an underscore; expand it U=`sed -n 's/^U = //p' < "$mf"` # Find all dependency output files, they are included files with # $(DEPDIR) in their names. We invoke sed twice because it is the # simplest approach to changing $(DEPDIR) to its actual value in the # expansion. for file in `sed -n " s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \ sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g' -e 's/\$U/'"$U"'/g'`; do # Make sure the directory exists. test -f "$dirpart/$file" && continue fdir=`(dirname "$file") 2>/dev/null || $as_expr X"$file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$file" : 'X\(//\)[^/]' \| \ X"$file" : 'X\(//\)$' \| \ X"$file" : 'X\(/\)' \| \ . : '\(.\)' 2>/dev/null || echo X"$file" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } /^X\(\/\/\)[^/].*/{ s//\1/; q; } /^X\(\/\/\)$/{ s//\1/; q; } /^X\(\/\).*/{ s//\1/; q; } s/.*/./; q'` { if $as_mkdir_p; then mkdir -p $dirpart/$fdir else as_dir=$dirpart/$fdir as_dirs= while test ! -d "$as_dir"; do as_dirs="$as_dir $as_dirs" as_dir=`(dirname "$as_dir") 2>/dev/null || $as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$as_dir" : 'X\(//\)[^/]' \| \ X"$as_dir" : 'X\(//\)$' \| \ X"$as_dir" : 'X\(/\)' \| \ . : '\(.\)' 2>/dev/null || echo X"$as_dir" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } /^X\(\/\/\)[^/].*/{ s//\1/; q; } /^X\(\/\/\)$/{ s//\1/; q; } /^X\(\/\).*/{ s//\1/; q; } s/.*/./; q'` done test ! -n "$as_dirs" || mkdir $as_dirs fi || { { echo "$as_me:$LINENO: error: cannot create directory $dirpart/$fdir" >&5 echo "$as_me: error: cannot create directory $dirpart/$fdir" >&2;} { (exit 1); exit 1; }; }; } # echo "creating $dirpart/$file" echo '# dummy' > "$dirpart/$file" done done ;; esac done _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF { (exit 0); exit 0; } _ACEOF chmod +x $CONFIG_STATUS ac_clean_files=$ac_clean_files_save # configure is writing to config.log, and then calls config.status. # config.status does its own redirection, appending to config.log. # Unfortunately, on DOS this fails, as config.log is still kept open # by configure, so config.status won't be able to write to it; its # output is simply discarded. So we exec the FD to /dev/null, # effectively closing config.log, so it can be properly (re)opened and # appended to by config.status. When coming back to configure, we # need to make the FD available again. 
if test "$no_create" != yes; then ac_cs_success=: ac_config_status_args= test "$silent" = yes && ac_config_status_args="$ac_config_status_args --quiet" exec 5>/dev/null $SHELL $CONFIG_STATUS $ac_config_status_args || ac_cs_success=false exec 5>>config.log # Use ||, not &&, to avoid exiting from the if with $? = 1, which # would make configure fail if this is the last instruction. $ac_cs_success || { (exit 1); exit 1; } fi ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/README0000644000000000000000000000477211672453175020754 0ustar rootroot . syck . [ version 0.54 ] INSTALLATION ./configure make make check sudo make install If the unit tests don't pass, notify me immediately. This distribution is tested on FreeBSD and Linux. I don't release it unless the tests pass on those machines. If tests aren't passing, then that's a problem. ABOUT Syck is the Scripters' YAML Cobble-Yourself-a-Parser Kit. I don't much care if the acronym works, as long as the library does! The whole point of Syck is to make parsing and emitting YAML very simple for scripting languages through C bindings. It doesn't strive to be a pull parser or very extendible. It just is concerned with loading a YAML document into a C structure which can be easily translated into a scripting language's internal native data type. RUBY INSTALLATION You don't need to `make install', but please configure and make libsyck as outlined above. cd ext/ruby ruby install.rb config ruby install.rb setup sudo ruby install.rb install Syck works best with Ruby. Ruby's symbol table is leveraged, as well as Ruby's VALUE system. (You can read more about that below.) Syck is now included with Ruby (beginning with Ruby 1.8.0.) Please voice your support for Syck/YAML in Ruby distributions on the various platforms. PYTHON INSTALLATION You'll need to `make install' as described above. cd ext/python/ python setup.py build sudo python setup.py install PHP INSTALLATION You'll need to `make install' as described above. cd ext/php/ sh make_module.sh sudo make install HOW SYCK IS SO GREAT For example, in Ruby everything evaluates to a VALUE. I merely supply a handler to Syck that will take a SyckNode and transform it into a Ruby VALUE. A simple Ruby YAML::load could be built like so: static VALUE YAML_load( VALUE str ) { SyckParser* parser; parser = syck_new_parser(); syck_parser_handler( parser, YAML_handler ); return syck_parse( parser, str ); } static VALUE YAML_handler( SyckNode* node ) { switch( node->kind ) { case SYCK_MAP: VALUE key; VALUE h = rb_hash_new(); for ( key = node->content[0]; key != null; key++ ) { rb_hash_set( h, key, key++ ); } return h; break; } } For most C developers, it should be a no-brainer to bring basic YAML serialization to PHP, Tcl, Cocoa, etc. Instructions for using Syck's API are available in the README.EXT in this very same directory. ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/stamp-h10000644000000000000000000000002711672453175021436 0ustar rootroottimestamp for config.h ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/config.h0000644000000000000000000000427011672453175021503 0ustar rootroot/* config.h. Generated by configure. */ /* config.h.in. Generated from configure.in by autoheader. */ /* Define to 1 if you have the header file. */ #define HAVE_ALLOCA_H 1 /* Define to 1 if you don't have `vprintf' but do have `_doprnt.' */ /* #undef HAVE_DOPRNT */ /* Define to 1 if you have the header file. */ #define HAVE_INTTYPES_H 1 /* Define to 1 if your system has a GNU libc compatible `malloc' function, and to 0 otherwise. 
*/ #define HAVE_MALLOC 1 /* Define to 1 if you have the header file. */ #define HAVE_MEMORY_H 1 /* Define to 1 if you have the header file. */ #define HAVE_STDINT_H 1 /* Define to 1 if you have the header file. */ #define HAVE_STDLIB_H 1 /* Define to 1 if you have the header file. */ #define HAVE_STRINGS_H 1 /* Define to 1 if you have the header file. */ #define HAVE_STRING_H 1 /* Define to 1 if you have the header file. */ #define HAVE_SYS_STAT_H 1 /* Define to 1 if you have the header file. */ #define HAVE_SYS_TYPES_H 1 /* Define to 1 if you have the header file. */ #define HAVE_UNISTD_H 1 /* Define to 1 if you have the `vprintf' function. */ #define HAVE_VPRINTF 1 /* Name of package */ #define PACKAGE "syck" /* Define to the address where bug reports for this package should be sent. */ #define PACKAGE_BUGREPORT "" /* Define to the full name of this package. */ #define PACKAGE_NAME "syck" /* Define to the full name and version of this package. */ #define PACKAGE_STRING "syck 0.54" /* Define to the one symbol short name of this package. */ #define PACKAGE_TARNAME "syck" /* Define to the version of this package. */ #define PACKAGE_VERSION "0.54" /* The size of a `int', as computed by sizeof. */ #define SIZEOF_INT 4 /* The size of a `long', as computed by sizeof. */ #define SIZEOF_LONG 4 /* Define to 1 if you have the ANSI C header files. */ #define STDC_HEADERS 1 /* Version number of package */ #define VERSION "0.54" /* Define to 1 if `lex' declares `yytext' as a `char *' by default, not a `char[]'. */ #define YYTEXT_POINTER 1 /* Define to rpl_malloc if the replacement function should be used. */ /* #undef malloc */ ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/TODO0000644000000000000000000000132511672453175020553 0ustar rootroot## Syck 0.60 [x] Resolver needs to handle implicits [x] yaml_org_handler is an integral part of DefaultResolver#transfer [x] cNode must be central in-between datatype [x] The values will be fully loaded Hash, Array, String (Close, ended up using YAML::Syck::Seq, etc.) [x] This way node_import can take pure Ruby nodes [x] Incorporate BaseNode. [-] Fix typed seq-in-map shortcut. [x] Emitter should figure out shortcuts. [-] Ensure that common memory leaks are GONE by 0.60. [-] UTF-8 strict mode. ## Syck 0.70 [-] YPath integrated into Syck's core [-] Implicits can be collections with YPath matching [-] UTF-16 and -32 support. ## Syck 0.80 [-] Python extension with complete hooks. [-] YAML 1.1 support. ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/bootstrap0000644000000000000000000000012711672453175022022 0ustar rootroot#! /bin/sh set -x aclocal autoheader automake --foreign --add-missing --copy autoconf ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/Makefile.am0000644000000000000000000000013211672453175022112 0ustar rootroot# # I feel like saying, "The magic happens here!" But it doesn't. # SUBDIRS = lib tests ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/config.h.in0000644000000000000000000000403111672453175022103 0ustar rootroot/* config.h.in. Generated from configure.in by autoheader. */ /* Define to 1 if you have the header file. */ #undef HAVE_ALLOCA_H /* Define to 1 if you don't have `vprintf' but do have `_doprnt.' */ #undef HAVE_DOPRNT /* Define to 1 if you have the header file. */ #undef HAVE_INTTYPES_H /* Define to 1 if your system has a GNU libc compatible `malloc' function, and to 0 otherwise. */ #undef HAVE_MALLOC /* Define to 1 if you have the header file. */ #undef HAVE_MEMORY_H /* Define to 1 if you have the header file. 
*/ #undef HAVE_STDINT_H /* Define to 1 if you have the header file. */ #undef HAVE_STDLIB_H /* Define to 1 if you have the header file. */ #undef HAVE_STRINGS_H /* Define to 1 if you have the header file. */ #undef HAVE_STRING_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_STAT_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_TYPES_H /* Define to 1 if you have the header file. */ #undef HAVE_UNISTD_H /* Define to 1 if you have the `vprintf' function. */ #undef HAVE_VPRINTF /* Name of package */ #undef PACKAGE /* Define to the address where bug reports for this package should be sent. */ #undef PACKAGE_BUGREPORT /* Define to the full name of this package. */ #undef PACKAGE_NAME /* Define to the full name and version of this package. */ #undef PACKAGE_STRING /* Define to the one symbol short name of this package. */ #undef PACKAGE_TARNAME /* Define to the version of this package. */ #undef PACKAGE_VERSION /* The size of a `int', as computed by sizeof. */ #undef SIZEOF_INT /* The size of a `long', as computed by sizeof. */ #undef SIZEOF_LONG /* Define to 1 if you have the ANSI C header files. */ #undef STDC_HEADERS /* Version number of package */ #undef VERSION /* Define to 1 if `lex' declares `yytext' as a `char *' by default, not a `char[]'. */ #undef YYTEXT_POINTER /* Define to rpl_malloc if the replacement function should be used. */ #undef malloc ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/0000755000000000000000000000000011672453175020662 5ustar rootrootruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/0000755000000000000000000000000011672453175021643 5ustar rootrootruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/yts/0000755000000000000000000000000011672453175022462 5ustar rootrootruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/yts/YtsBlockMapping.yml0000644000000000000000000000243311672453175026255 0ustar rootroot--- test: One Element Mapping brief: | A mapping with one key/value pair yaml: | --- foo: bar perl: | {foo => 'bar'} ruby: | { 'foo' => 'bar' } --- test: Multi Element Mapping brief: | More than one key/value pair yaml: | --- red: baron white: walls blue: berries perl: | { red => 'baron', white => 'walls', blue => 'berries', } ruby: | { 'red' => 'baron', 'white' => 'walls', 'blue' => 'berries', } --- test: Values aligned brief: | Often times human editors of documents will align the values even though YAML emitters generally don't. yaml: | --- red: baron white: walls blue: berries perl: | { red => 'baron', white => 'walls', blue => 'berries', } ruby: | { 'red' => 'baron', 'white' => 'walls', 'blue' => 'berries', } --- test: Colons aligned brief: | Spaces can come before the ': ' key/value separator. yaml: | --- red : baron white : walls blue : berries perl: | { red => 'baron', white => 'walls', blue => 'berries', } ruby: | { 'red' => 'baron', 'white' => 'walls', 'blue' => 'berries', } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/yts/YtsBasicTests.yml0000644000000000000000000001345011672453175025754 0ustar rootroot--- %YAML:1.0 test: Simple Sequence brief: | You can specify a list in YAML by placing each member of the list on a new line with an opening dash. These lists are called sequences. yaml: | - apple - banana - carrot perl: | ['apple', 'banana', 'carrot'] python: | [ ['apple', 'banana', 'carrot'] ] ruby: | ['apple', 'banana', 'carrot'] --- test: Nested Sequences brief: | You can include a sequence within another sequence by giving the sequence an empty dash, followed by an indented list. 
yaml: | - - foo - bar - baz perl: | [['foo', 'bar', 'baz']] python: | [ [['foo', 'bar', 'baz']] ] ruby: | [['foo', 'bar', 'baz']] --- test: Mixed Sequences brief: | Sequences can contain any YAML data, including strings and other sequences. yaml: | - apple - - foo - bar - x123 - banana - carrot perl: | ['apple', ['foo', 'bar', 'x123'], 'banana', 'carrot'] python: | [ ['apple', ['foo', 'bar', 'x123'], 'banana', 'carrot'] ] ruby: | ['apple', ['foo', 'bar', 'x123'], 'banana', 'carrot'] --- test: Deeply Nested Sequences brief: | Sequences can be nested even deeper, with each level of indentation representing a level of depth. yaml: | - - - uno - dos perl: | [[['uno', 'dos']]] python: | [ [[['uno', 'dos']]] ] ruby: | [[['uno', 'dos']]] --- test: Simple Mapping brief: | You can add a keyed list (also known as a dictionary or hash) to your document by placing each member of the list on a new line, with a colon seperating the key from its value. In YAML, this type of list is called a mapping. yaml: | foo: whatever bar: stuff perl: | { foo => 'whatever', bar => 'stuff' } python: | [ {'foo': 'whatever', 'bar': 'stuff'} ] ruby: | { 'foo' => 'whatever', 'bar' => 'stuff' } --- test: Sequence in a Mapping brief: | A value in a mapping can be a sequence. yaml: | foo: whatever bar: - uno - dos perl: | { foo => 'whatever', bar => [ 'uno', 'dos' ] } python: | [ {'foo': 'whatever', 'bar': ['uno', 'dos']} ] ruby: | { 'foo' => 'whatever', 'bar' => [ 'uno', 'dos' ] } --- test: Nested Mappings brief: | A value in a mapping can be another mapping. yaml: | foo: whatever bar: fruit: apple name: steve sport: baseball perl: | { foo => 'whatever', bar => { fruit => 'apple', name => 'steve', sport => 'baseball' } } python: | [ {'foo': 'whatever', 'bar': { 'fruit': 'apple', 'name': 'steve', 'sport': 'baseball' } } ] ruby: | { 'foo' => 'whatever', 'bar' => { 'fruit' => 'apple', 'name' => 'steve', 'sport' => 'baseball' } } --- test: Mixed Mapping brief: | A mapping can contain any assortment of mappings and sequences as values. yaml: | foo: whatever bar: - fruit: apple name: steve sport: baseball - more - python: rocks perl: papers ruby: scissorses perl: | { foo => 'whatever', bar => [ { fruit => 'apple', name => 'steve', sport => 'baseball' }, 'more', { python => 'rocks', perl => 'papers', ruby => 'scissorses' } ] } python: | [ {'foo': 'whatever', 'bar': [ { 'fruit': 'apple', 'name': 'steve', 'sport': 'baseball' }, 'more', { 'python': 'rocks', 'perl': 'papers', 'ruby': 'scissorses' } ] } ] ruby: | { 'foo' => 'whatever', 'bar' => [ { 'fruit' => 'apple', 'name' => 'steve', 'sport' => 'baseball' }, 'more', { 'python' => 'rocks', 'perl' => 'papers', 'ruby' => 'scissorses' } ] } --- test: Mapping-in-Sequence Shortcut brief: | If you are adding a mapping to a sequence, you can place the mapping on the same line as the dash as a shortcut. yaml: | - work on YAML.py: - work on Store perl: | [ { 'work on YAML.py' => ['work on Store'] } ] python: | [ [ {'work on YAML.py': ['work on Store']} ] ] ruby: | [ { 'work on YAML.py' => ['work on Store'] } ] --- test: Sequence-in-Mapping Shortcut brief: | The dash in a sequence counts as indentation, so you can add a sequence inside of a mapping without needing spaces as indentation. 
yaml: | allow: - 'localhost' - '%.sourceforge.net' - '%.freepan.org' perl: | { 'allow' => [ 'localhost', '%.sourceforge.net', '%.freepan.org' ] } python: | [ { 'allow': [ 'localhost', '%.sourceforge.net', '%.freepan.org' ] } ] ruby: | { 'allow' => [ 'localhost', '%.sourceforge.net', '%.freepan.org' ] } --- test: Merge key brief: | A merge key ('<<') can be used in a mapping to insert other mappings. If the value associated with the merge key is a mapping, each of its key/value pairs is inserted into the current mapping. yaml: | mapping: name: Joe job: Accountant <<: age: 38 ruby: | { 'mapping' => { 'name' => 'Joe', 'job' => 'Accountant', 'age' => 38 } } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/yts/yts.rb0000644000000000000000000001103411672453175023625 0ustar rootroot# vim:sw=4:ts=4 # $Id: yts.rb,v 1.3 2005/03/28 14:29:15 why Exp $ # require 'yaml' module YAML # # Make a time with the time zone # def YAML::mktime( year, mon, day, hour, min, sec, usec, zone = "Z" ) usec = usec.to_s.to_f * 1000000 val = Time::utc( year.to_i, mon.to_i, day.to_i, hour.to_i, min.to_i, sec.to_i, usec ) if zone != "Z" hour = zone[0,3].to_i * 3600 min = zone[3,2].to_i * 60 ofs = (hour + min) val = Time.at( val.to_f - ofs ) end return val end class Stream def ==( doc ) self.documents == doc.documents end end class PrivateType def ==( pt ) self.type_id == pt.type_id and self.value == pt.value end end class DomainType def ==( dt ) self.domain == dt.domain and self.type_id == dt.type_id and self.value == dt.value end end class SpecialHash def ==( h ) if h.is_a? SpecialHash self.default == h.default and not self.keys.detect { |k| self[k] != h[k] } else false end end def inspect "{SpecialHash: @default=#{@default} @hash=#{super}}" end end end header = " %YAML:1.0" YAML::load( File.read( "index.yml" ) ).each do |yst| YAML.load_documents( File.read( yst + ".yml" ) ) do |ydoc| # # Test the document # reason = nil success = 'no' round_trip = 'no' round_out = nil interval = nil if ydoc.has_key?( 'ruby' ) obj_r = nil obj_y = nil begin eval( ydoc['ruby-setup'] ) if ydoc.has_key?( 'ruby-setup' ) time = Time.now ydoc['yaml'].gsub!( / +$/, '' ) if ydoc.has_key?( 'documents' ) obj_y = YAML::load_stream( ydoc['yaml'] ) else obj_y = YAML::load( ydoc['yaml'] ) end interval = Time.now - time eval( "obj_r = #{ydoc['ruby']}" ) if obj_r == obj_y success = 'yes' # Attempt round trip unless ydoc['no-round-trip'].is_a?( Array ) and ydoc['no-round-trip'].include?( 'ruby' ) obj_y2 = nil begin if obj_y.is_a? YAML::Stream round_out = obj_y.emit obj_y2 = YAML::load_stream( round_out ) else round_out = obj_y.to_yaml obj_y2 = YAML::load( round_out ) end rescue YAML::Error => e reason = e.to_s end obj_y = obj_y2 eval( "obj_r = #{ydoc['ruby']}" ) if obj_r == obj_y round_trip = 'yes' else reason = 'Expected <' + obj_r.inspect + '>, but was <' + obj_y2.inspect + '>' end else end else reason = 'Expected <' + obj_r.inspect + '>, but was <' + obj_y.inspect + '>' end rescue Exception => e reason = e.to_s end else reason = 'No Ruby parse information available in the test document.' end # # Print out YAML result # puts <, but was <' + paths.inspect + '>' end rescue YAML::Error => e reason = e.to_s end puts < You can separate YAML documents with a string of three dashes. yaml: | - foo: 1 bar: 2 --- more: stuff python: | [ [ { 'foo': 1, 'bar': 2 } ], { 'more': 'stuff' } ] ruby: | [ { 'foo' => 1, 'bar' => 2 } ] --- test: Leading Document Separator brief: > You can explicity give an opening document separator to your YAML stream. 
yaml: | --- - foo: 1 bar: 2 --- more: stuff python: | [ [ {'foo': 1, 'bar': 2}], {'more': 'stuff'} ] ruby: | [ { 'foo' => 1, 'bar' => 2 } ] --- test: YAML Header brief: > The opening separator can contain directives to the YAML parser, such as the version number. yaml: | --- %YAML:1.0 foo: 1 bar: 2 python: | [ { 'foo': 1, 'bar': 2 } ] ruby: | y = YAML::Stream.new y.add( { 'foo' => 1, 'bar' => 2 } ) documents: 1 --- test: Red Herring Document Separator brief: > Separators included in blocks or strings are treated as blocks or strings, as the document separator should have no indentation preceding it. yaml: | foo: | --- python: | [ { 'foo': "---\n" } ] ruby: | { 'foo' => "---\n" } --- test: Multiple Document Separators in Block brief: > This technique allows you to embed other YAML documents within literal blocks. yaml: | foo: | --- foo: bar --- yo: baz bar: | fooness python: | [ { 'foo': flushLeft(""" --- foo: bar --- yo: baz """), 'bar': "fooness\n" } ] ruby: | { 'foo' => "---\nfoo: bar\n---\nyo: baz\n", 'bar' => "fooness\n" } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/yts/YtsSpecificationExamples.yml0000644000000000000000000020462711672453175030177 0ustar rootroot--- %YAML:1.0 test: Sequence of scalars spec: 2.1 yaml: | - Mark McGwire - Sammy Sosa - Ken Griffey perl: | [ 'Mark McGwire', 'Sammy Sosa', 'Ken Griffey' ] python: | [ ['Mark McGwire', 'Sammy Sosa', 'Ken Griffey'] ] ruby: | [ 'Mark McGwire', 'Sammy Sosa', 'Ken Griffey' ] syck: | struct test_node seq[] = { { T_STR, 0, "Mark McGwire" }, { T_STR, 0, "Sammy Sosa" }, { T_STR, 0, "Ken Griffey" }, end_node }; struct test_node stream[] = { { T_SEQ, 0, 0, seq }, end_node }; --- test: Mapping of scalars to scalars spec: 2.2 yaml: | hr: 65 avg: 0.278 rbi: 147 perl: | { hr => 65, avg => 0.278, rbi => 147 } python: | [ {'hr': 65, 'avg': .278, 'rbi': 147} ] ruby: | { 'hr' => 65, 'avg' => 0.278, 'rbi' => 147 } syck: | struct test_node map[] = { { T_STR, 0, "hr" }, { T_STR, 0, "65" }, { T_STR, 0, "avg" }, { T_STR, 0, "0.278" }, { T_STR, 0, "rbi" }, { T_STR, 0, "147" }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; --- test: Mapping of scalars to sequences spec: 2.3 yaml: | american: - Boston Red Sox - Detroit Tigers - New York Yankees national: - New York Mets - Chicago Cubs - Atlanta Braves perl: | { american => [ 'Boston Red Sox', 'Detroit Tigers', 'New York Yankees' ], national => [ 'New York Mets', 'Chicago Cubs', 'Atlanta Braves' ] } python: | [ { 'american': ['Boston Red Sox', 'Detroit Tigers', 'New York Yankees'], 'national': ['New York Mets', 'Chicago Cubs', 'Atlanta Braves'] } ] ruby: | { 'american' => [ 'Boston Red Sox', 'Detroit Tigers', 'New York Yankees' ], 'national' => [ 'New York Mets', 'Chicago Cubs', 'Atlanta Braves' ] } syck: | struct test_node seq1[] = { { T_STR, 0, "Boston Red Sox" }, { T_STR, 0, "Detroit Tigers" }, { T_STR, 0, "New York Yankees" }, end_node }; struct test_node seq2[] = { { T_STR, 0, "New York Mets" }, { T_STR, 0, "Chicago Cubs" }, { T_STR, 0, "Atlanta Braves" }, end_node }; struct test_node map[] = { { T_STR, 0, "american" }, { T_SEQ, 0, 0, seq1 }, { T_STR, 0, "national" }, { T_SEQ, 0, 0, seq2 }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; --- test: Sequence of mappings spec: 2.4 yaml: | - name: Mark McGwire hr: 65 avg: 0.278 - name: Sammy Sosa hr: 63 avg: 0.288 perl: | [ {name => 'Mark McGwire', hr => 65, avg => 0.278}, {name => 'Sammy Sosa', hr => 63, avg => 0.288} ] python: | [[ { 'name': 'Mark McGwire', 'hr': 65, 'avg': 0.278 
}, { 'name': 'Sammy Sosa', 'hr': 63, 'avg': 0.288 } ]] ruby: | [ {'name' => 'Mark McGwire', 'hr' => 65, 'avg' => 0.278}, {'name' => 'Sammy Sosa', 'hr' => 63, 'avg' => 0.288} ] syck: | struct test_node map1[] = { { T_STR, 0, "name" }, { T_STR, 0, "Mark McGwire" }, { T_STR, 0, "hr" }, { T_STR, 0, "65" }, { T_STR, 0, "avg" }, { T_STR, 0, "0.278" }, end_node }; struct test_node map2[] = { { T_STR, 0, "name" }, { T_STR, 0, "Sammy Sosa" }, { T_STR, 0, "hr" }, { T_STR, 0, "63" }, { T_STR, 0, "avg" }, { T_STR, 0, "0.288" }, end_node }; struct test_node seq[] = { { T_MAP, 0, 0, map1 }, { T_MAP, 0, 0, map2 }, end_node }; struct test_node stream[] = { { T_SEQ, 0, 0, seq }, end_node }; --- test: Legacy A5 spec: legacy_A5 yaml: | ? - New York Yankees - Atlanta Braves : - 2001-07-02 - 2001-08-12 - 2001-08-14 ? - Detroit Tigers - Chicago Cubs : - 2001-07-23 perl-busted: > YAML.pm will be able to emulate this behavior soon. In this regard it may be somewhat more correct than Python's native behaviour which can only use tuples as mapping keys. PyYAML will also need to figure out some clever way to roundtrip structured keys. python: | [ { ('New York Yankees', 'Atlanta Braves'): [yaml.timestamp('2001-07-02'), yaml.timestamp('2001-08-12'), yaml.timestamp('2001-08-14')], ('Detroit Tigers', 'Chicago Cubs'): [yaml.timestamp('2001-07-23')] } ] ruby: | { [ 'New York Yankees', 'Atlanta Braves' ] => [ Date.new( 2001, 7, 2 ), Date.new( 2001, 8, 12 ), Date.new( 2001, 8, 14 ) ], [ 'Detroit Tigers', 'Chicago Cubs' ] => [ Date.new( 2001, 7, 23 ) ] } syck: | struct test_node seq1[] = { { T_STR, 0, "New York Yankees" }, { T_STR, 0, "Atlanta Braves" }, end_node }; struct test_node seq2[] = { { T_STR, 0, "2001-07-02" }, { T_STR, 0, "2001-08-12" }, { T_STR, 0, "2001-08-14" }, end_node }; struct test_node seq3[] = { { T_STR, 0, "Detroit Tigers" }, { T_STR, 0, "Chicago Cubs" }, end_node }; struct test_node seq4[] = { { T_STR, 0, "2001-07-23" }, end_node }; struct test_node map[] = { { T_SEQ, 0, 0, seq1 }, { T_SEQ, 0, 0, seq2 }, { T_SEQ, 0, 0, seq3 }, { T_SEQ, 0, 0, seq4 }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; --- test: Sequence of sequences spec: 2.5 yaml: | - [ name , hr , avg ] - [ Mark McGwire , 65 , 0.278 ] - [ Sammy Sosa , 63 , 0.288 ] perl: | [ [ 'name', 'hr', 'avg' ], [ 'Mark McGwire', 65, 0.278 ], [ 'Sammy Sosa', 63, 0.288 ], ] python: | [[ [ 'name', 'hr', 'avg' ], [ 'Mark McGwire', 65, 0.278 ], [ 'Sammy Sosa', 63, 0.288 ] ]] ruby: | [ [ 'name', 'hr', 'avg' ], [ 'Mark McGwire', 65, 0.278 ], [ 'Sammy Sosa', 63, 0.288 ] ] syck: | struct test_node seq1[] = { { T_STR, 0, "name" }, { T_STR, 0, "hr" }, { T_STR, 0, "avg" }, end_node }; struct test_node seq2[] = { { T_STR, 0, "Mark McGwire" }, { T_STR, 0, "65" }, { T_STR, 0, "0.278" }, end_node }; struct test_node seq3[] = { { T_STR, 0, "Sammy Sosa" }, { T_STR, 0, "63" }, { T_STR, 0, "0.288" }, end_node }; struct test_node seq[] = { { T_SEQ, 0, 0, seq1 }, { T_SEQ, 0, 0, seq2 }, { T_SEQ, 0, 0, seq3 }, end_node }; struct test_node stream[] = { { T_SEQ, 0, 0, seq }, end_node }; --- test: Mapping of mappings spec: 2.6 yaml: | Mark McGwire: {hr: 65, avg: 0.278} Sammy Sosa: { hr: 63, avg: 0.288 } perl-xxx: | { 'Mark McGwire' => { 'hr' => 65, 'avg' => 0.278 }, 'Sammy Sosa' => { 'hr' => 63, 'avg' => 0.288 }, } not_yet_in_python: | [{ 'Mark McGwire': { 'hr': 65, 'avg': 0.278 }, 'Sammy Sosa': { 'hr': 63, 'avg': 0.288 } }] ruby: | { 'Mark McGwire' => { 'hr' => 65, 'avg' => 0.278 }, 'Sammy Sosa' => { 'hr' => 63, 'avg' => 0.288 } } syck: | struct 
test_node map1[] = { { T_STR, 0, "hr" }, { T_STR, 0, "65" }, { T_STR, 0, "avg" }, { T_STR, 0, "0.278" }, end_node }; struct test_node map2[] = { { T_STR, 0, "hr" }, { T_STR, 0, "63" }, { T_STR, 0, "avg" }, { T_STR, 0, "0.288" }, end_node }; struct test_node map[] = { { T_STR, 0, "Mark McGwire" }, { T_MAP, 0, 0, map1 }, { T_STR, 0, "Sammy Sosa" }, { T_MAP, 0, 0, map2 }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; --- test: Two documents in a stream each with a leading comment spec: 2.7 yaml: | # Ranking of 1998 home runs --- - Mark McGwire - Sammy Sosa - Ken Griffey # Team ranking --- - Chicago Cubs - St Louis Cardinals perl: | { name => 'Mark McGwire', 'hr' => 65, 'avg' => 0.278 }, { name => 'Sammy Sosa', 'hr' => 63, 'avg' => 0.288 } python: | [ { 'name': 'Mark McGwire', 'hr': 65, 'avg': 0.278 }, { 'name': 'Sammy Sosa', 'hr': 63, 'avg': 0.288 } ] ruby: | y = YAML::Stream.new y.add( [ 'Mark McGwire', 'Sammy Sosa', 'Ken Griffey' ] ) y.add( [ 'Chicago Cubs', 'St Louis Cardinals' ] ) syck: | struct test_node seq1[] = { { T_STR, 0, "Mark McGwire" }, { T_STR, 0, "Sammy Sosa" }, { T_STR, 0, "Ken Griffey" }, end_node }; struct test_node seq2[] = { { T_STR, 0, "Chicago Cubs" }, { T_STR, 0, "St Louis Cardinals" }, end_node }; struct test_node stream[] = { { T_SEQ, 0, 0, seq1 }, { T_SEQ, 0, 0, seq2 }, end_node }; documents: 2 --- test: Play by play feed from a game spec: 2.8 yaml: | --- time: 20:03:20 player: Sammy Sosa action: strike (miss) ... --- time: 20:03:47 player: Sammy Sosa action: grand slam ... perl: | [ 'Mark McGwire', 'Sammy Sosa', 'Ken Griffey' ] python: | [[ 'Mark McGwire', 'Sammy Sosa', 'Ken Griffey' ]] ruby: | y = YAML::Stream.new y.add( {"player"=>"Sammy Sosa", "time"=>72200, "action"=>"strike (miss)"} ) y.add( {"player"=>"Sammy Sosa", "time"=>72227, "action"=>"grand slam"} ) syck: | struct test_node map1[] = { { T_STR, 0, "time" }, { T_STR, 0, "20:03:20" }, { T_STR, 0, "player" }, { T_STR, 0, "Sammy Sosa" }, { T_STR, 0, "action" }, { T_STR, 0, "strike (miss)" }, end_node }; struct test_node map2[] = { { T_STR, 0, "time" }, { T_STR, 0, "20:03:47" }, { T_STR, 0, "player" }, { T_STR, 0, "Sammy Sosa" }, { T_STR, 0, "action" }, { T_STR, 0, "grand slam" }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map1 }, { T_MAP, 0, 0, map2 }, end_node }; documents: 2 --- test: Single document with two comments spec: 2.9 yaml: | hr: # 1998 hr ranking - Mark McGwire - Sammy Sosa rbi: # 1998 rbi ranking - Sammy Sosa - Ken Griffey perl-xxx: | { 'hr' => [ 'Mark McGwire', 'Sammy Sosa' ], 'rbi' => [ 'Sammy Sosa', 'Ken Griffey' ] } python: | [{ 'hr': [ 'Mark McGwire', 'Sammy Sosa' ], 'rbi': [ 'Sammy Sosa', 'Ken Griffey' ] }] ruby: | { 'hr' => [ 'Mark McGwire', 'Sammy Sosa' ], 'rbi' => [ 'Sammy Sosa', 'Ken Griffey' ] } syck: | struct test_node seq1[] = { { T_STR, 0, "Mark McGwire" }, { T_STR, 0, "Sammy Sosa" }, end_node }; struct test_node seq2[] = { { T_STR, 0, "Sammy Sosa" }, { T_STR, 0, "Ken Griffey" }, end_node }; struct test_node map[] = { { T_STR, 0, "hr" }, { T_SEQ, 0, 0, seq1 }, { T_STR, 0, "rbi" }, { T_SEQ, 0, 0, seq2 }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; --- test: Node for Sammy Sosa appears twice in this document spec: 2.10 yaml: | --- hr: - Mark McGwire # Following node labeled SS - &SS Sammy Sosa rbi: - *SS # Subsequent occurance - Ken Griffey perl: | { 'hr' => [ 'Mark McGwire', 'Sammy Sosa' ], 'rbi' => [ 'Sammy Sosa', 'Ken Griffey' ] } python: | [{ 'hr': [ 'Mark McGwire', 'Sammy Sosa' ], 'rbi': [ 'Sammy Sosa', 
'Ken Griffey' ] }] ruby: | { 'hr' => [ 'Mark McGwire', 'Sammy Sosa' ], 'rbi' => [ 'Sammy Sosa', 'Ken Griffey' ] } syck: | struct test_node seq1[] = { { T_STR, 0, "Mark McGwire" }, { T_STR, 0, "Sammy Sosa" }, end_node }; struct test_node seq2[] = { { T_STR, 0, "Sammy Sosa" }, { T_STR, 0, "Ken Griffey" }, end_node }; struct test_node map[] = { { T_STR, 0, "hr" }, { T_SEQ, 0, 0, seq1 }, { T_STR, 0, "rbi" }, { T_SEQ, 0, 0, seq2 }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; --- test: Mapping between sequences spec: 2.11 yaml: | ? # PLAY SCHEDULE - Detroit Tigers - Chicago Cubs : - 2001-07-23 ? [ New York Yankees, Atlanta Braves ] : [ 2001-07-02, 2001-08-12, 2001-08-14 ] ruby: | { [ 'Detroit Tigers', 'Chicago Cubs' ] => [ Date.new( 2001, 7, 23 ) ], [ 'New York Yankees', 'Atlanta Braves' ] => [ Date.new( 2001, 7, 2 ), Date.new( 2001, 8, 12 ), Date.new( 2001, 8, 14 ) ] } syck: | struct test_node seq1[] = { { T_STR, 0, "New York Yankees" }, { T_STR, 0, "Atlanta Braves" }, end_node }; struct test_node seq2[] = { { T_STR, 0, "2001-07-02" }, { T_STR, 0, "2001-08-12" }, { T_STR, 0, "2001-08-14" }, end_node }; struct test_node seq3[] = { { T_STR, 0, "Detroit Tigers" }, { T_STR, 0, "Chicago Cubs" }, end_node }; struct test_node seq4[] = { { T_STR, 0, "2001-07-23" }, end_node }; struct test_node map[] = { { T_SEQ, 0, 0, seq3 }, { T_SEQ, 0, 0, seq4 }, { T_SEQ, 0, 0, seq1 }, { T_SEQ, 0, 0, seq2 }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; --- test: Sequence key shortcut spec: 2.12 yaml: | --- # products purchased - item : Super Hoop quantity: 1 - item : Basketball quantity: 4 - item : Big Shoes quantity: 1 perl: | [ { item => 'Super Hoop', quantity => 1 }, { item => 'Basketball', quantity => 4 }, { item => 'Big Shoes', quantity => 1 } ] ruby: | [ { 'item' => 'Super Hoop', 'quantity' => 1 }, { 'item' => 'Basketball', 'quantity' => 4 }, { 'item' => 'Big Shoes', 'quantity' => 1 } ] python: | [ { 'item': 'Super Hoop', 'quantity': 1 }, { 'item': 'Basketball', 'quantity': 4 }, { 'item': 'Big Shoes', 'quantity': 1 } ] syck: | struct test_node map1[] = { { T_STR, 0, "item" }, { T_STR, 0, "Super Hoop" }, { T_STR, 0, "quantity" }, { T_STR, 0, "1" }, end_node }; struct test_node map2[] = { { T_STR, 0, "item" }, { T_STR, 0, "Basketball" }, { T_STR, 0, "quantity" }, { T_STR, 0, "4" }, end_node }; struct test_node map3[] = { { T_STR, 0, "item" }, { T_STR, 0, "Big Shoes" }, { T_STR, 0, "quantity" }, { T_STR, 0, "1" }, end_node }; struct test_node seq[] = { { T_MAP, 0, 0, map1 }, { T_MAP, 0, 0, map2 }, { T_MAP, 0, 0, map3 }, end_node }; struct test_node stream[] = { { T_SEQ, 0, 0, seq }, end_node }; --- test: Literal perserves newlines spec: 2.13 yaml: | # ASCII Art --- | \//||\/|| // || ||_ perl: | "\\//||\\/||\n// || ||_\n" ruby: | "\\//||\\/||\n// || ||_\n" python: | [ flushLeft( """ \//||\/|| // || ||_ """ ) ] syck: | struct test_node stream[] = { { T_STR, 0, "\\//||\\/||\n// || ||_\n" }, end_node }; --- test: Folded treats newlines as a space spec: 2.14 yaml: | --- Mark McGwire's year was crippled by a knee injury. perl: | "Mark McGwire's year was crippled by a knee injury." ruby: | "Mark McGwire's year was crippled by a knee injury." python: | [ "Mark McGwire's year was crippled by a knee injury." ] syck: | struct test_node stream[] = { { T_STR, 0, "Mark McGwire's year was crippled by a knee injury." 
}, end_node }; --- test: Newlines preserved for indented and blank lines spec: 2.15 yaml: | --- > Sammy Sosa completed another fine season with great stats. 63 Home Runs 0.288 Batting Average What a year! perl: | "Sammy Sosa completed another fine season with great stats.\n\n 63 Home Runs\n 0.288 Batting Average\n\nWhat a year!\n" ruby: | "Sammy Sosa completed another fine season with great stats.\n\n 63 Home Runs\n 0.288 Batting Average\n\nWhat a year!\n" python: | [ flushLeft( """ Sammy Sosa completed another fine season with great stats. 63 Home Runs 0.288 Batting Average What a year! """ ) ] syck: | struct test_node stream[] = { { T_STR, 0, "Sammy Sosa completed another fine season with great stats.\n\n 63 Home Runs\n 0.288 Batting Average\n\nWhat a year!\n" }, end_node }; --- test: Indentation determines scope spec: 2.16 yaml: | name: Mark McGwire accomplishment: > Mark set a major league home run record in 1998. stats: | 65 Home Runs 0.278 Batting Average perl: | { name => 'Mark McGwire', accomplishment => "Mark set a major league home run record in 1998.\n", stats => "65 Home Runs\n0.278 Batting Average\n" } ruby: | { 'name' => 'Mark McGwire', 'accomplishment' => "Mark set a major league home run record in 1998.\n", 'stats' => "65 Home Runs\n0.278 Batting Average\n" } python: | [ { 'name': 'Mark McGwire', 'accomplishment': 'Mark set a major league home run record in 1998.\n', 'stats': "65 Home Runs\n0.278 Batting Average\n" } ] syck: | struct test_node map[] = { { T_STR, 0, "name" }, { T_STR, 0, "Mark McGwire" }, { T_STR, 0, "accomplishment" }, { T_STR, 0, "Mark set a major league home run record in 1998.\n" }, { T_STR, 0, "stats" }, { T_STR, 0, "65 Home Runs\n0.278 Batting Average\n" }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; --- test: Quoted scalars spec: 2.17 yaml: | unicode: "Sosa did fine.\u263A" control: "\b1998\t1999\t2000\n" hexesc: "\x0D\x0A is \r\n" single: '"Howdy!" he cried.' quoted: ' # not a ''comment''.' tie-fighter: '|\-*-/|' perl-not-working: | { unicode => "Sosa did fine.\x{263A}", control => "\x081998\t1999\t2000\n", hexesc => "\r\n is \r\n", single => '"Howdy!" he cried.', quoted => " # not a 'comment'.", "tie-fighter" => '|\\-*-/|', } ruby: | { "tie-fighter" => "|\\-*-/|", "control"=>"\0101998\t1999\t2000\n", "unicode"=>"Sosa did fine." + ["263A".hex ].pack('U*'), "quoted"=>" # not a 'comment'.", "single"=>"\"Howdy!\" he cried.", "hexesc"=>"\r\n is \r\n" } python: | [ { 'unicode': u"Sosa did fine.\u263A", 'control': "\b1998\t1999\t2000\n", 'hexesc': "\x0D\x0A is \r\n", 'single': '"Howdy!" he cried.', 'quoted': ' # not a \'comment\'.', 'tie-fighter': '|\-*-/|', } ] --- test: Multiline flow scalars spec: 2.18 yaml: | plain: This unquoted scalar spans many lines. quoted: "So does this quoted scalar.\n" perl-not-working: | { plain => 'This unquoted scalar spans many lines.', quoted => "So does this quoted scalar.\n" } ruby: | { 'plain' => 'This unquoted scalar spans many lines.', 'quoted' => "So does this quoted scalar.\n" } python: | [ { 'plain': 'This unquoted scalar spans many lines.', 'quoted': 'So does this quoted scalar.\n' } ] syck: | struct test_node map[] = { { T_STR, 0, "plain" }, { T_STR, 0, "This unquoted scalar spans many lines." 
}, { T_STR, 0, "quoted" }, { T_STR, 0, "So does this quoted scalar.\n" }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; --- test: Integers spec: 2.19 yaml: | canonical: 12345 decimal: +12,345 sexagecimal: 3:25:45 octal: 014 hexadecimal: 0xC perl: | { canonical => 12345, decimal => 12345, octal => oct("014"), hexadecimal => hex("0xC"), } ruby: | { 'canonical' => 12345, 'decimal' => 12345, 'sexagecimal' => 12345, 'octal' => '014'.oct, 'hexadecimal' => '0xC'.hex } python: | [ { 'canonical': 12345, 'decimal': 12345, 'octal': 014, 'hexadecimal': 0xC } ] syck: | struct test_node map[] = { { T_STR, 0, "canonical" }, { T_STR, 0, "12345" }, { T_STR, 0, "decimal" }, { T_STR, 0, "+12,345" }, { T_STR, 0, "sexagecimal" }, { T_STR, 0, "3:25:45" }, { T_STR, 0, "octal" }, { T_STR, 0, "014" }, { T_STR, 0, "hexadecimal" }, { T_STR, 0, "0xC" }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; --- # FIX: spec shows parens around -inf and NaN test: Floating point spec: 2.20 yaml: | canonical: 1.23015e+3 exponential: 12.3015e+02 sexagecimal: 20:30.15 fixed: 1,230.15 negative infinity: -.inf not a number: .NaN perl: | { 'canonical' => 1230.15, 'exponential' => 1230.15, 'fixed' => 1230.15, 'negative infinity' => "-.inf", 'not a number' => ".NaN", } ruby: | { 'canonical' => 1230.15, 'exponential' => 1230.15, 'sexagecimal' => 1230.15, 'fixed' => 1230.15, 'negative infinity' => -1.0/0.0, 'not a number' => 0.0/0.0 } if obj_y['not a number'].nan? # NaN comparison doesn't work right against 0.0/0.0 obj_r['not a number'] = obj_y['not a number'] end python: | [ { 'canonical': 1.23015e+3, 'exponential': 1.23015e+3, 'fixed': 1230.15, 'negative infinity': '-.inf', 'not a number': '.NaN', } ] syck: | struct test_node map[] = { { T_STR, 0, "canonical" }, { T_STR, 0, "1.23015e+3" }, { T_STR, 0, "exponential" }, { T_STR, 0, "12.3015e+02" }, { T_STR, 0, "sexagecimal" }, { T_STR, 0, "20:30.15" }, { T_STR, 0, "fixed" }, { T_STR, 0, "1,230.15" }, { T_STR, 0, "negative infinity" }, { T_STR, 0, "-.inf" }, { T_STR, 0, "not a number" }, { T_STR, 0, ".NaN" }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; --- test: Miscellaneous spec: 2.21 yaml: | null: ~ true: y false: n string: '12345' perl: | { null => undef, true => 1, false => 0, string => "12345" } ruby: | { nil => nil, true => true, false => false, 'string' => '12345' } python: | [ { 'null': None, 'true': 1, 'false': 0, 'string': '12345', } ] syck: | struct test_node map[] = { { T_STR, 0, "null" }, { T_STR, 0, "~" }, { T_STR, 0, "true" }, { T_STR, 0, "y" }, { T_STR, 0, "false" }, { T_STR, 0, "n" }, { T_STR, 0, "string" }, { T_STR, 0, "12345" }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; --- test: Timestamps spec: 2.22 yaml: | canonical: 2001-12-15T02:59:43.1Z iso8601: 2001-12-14t21:59:43.10-05:00 spaced: 2001-12-14 21:59:43.10 -05:00 date: 2002-12-14 # Time is noon UTC perl: | { canonical => "2001-12-15T02:59:43.1Z", iso8601 => "2001-12-14t21:59:43.10-05:00", spaced => "2001-12-14 21:59:43.10 -05:00", date => "2002-12-14", } ruby: | { 'canonical' => YAML::mktime( 2001, 12, 15, 2, 59, 43, 0.10 ), 'iso8601' => YAML::mktime( 2001, 12, 14, 21, 59, 43, 0.10, "-05:00" ), 'spaced' => YAML::mktime( 2001, 12, 14, 21, 59, 43, 0.10, "-05:00" ), 'date' => Date.new( 2002, 12, 14 ) } syck: | struct test_node map[] = { { T_STR, 0, "canonical" }, { T_STR, 0, "2001-12-15T02:59:43.1Z" }, { T_STR, 0, "iso8601" }, { T_STR, 0, "2001-12-14t21:59:43.10-05:00" }, { T_STR, 0, "spaced" }, 
{ T_STR, 0, "2001-12-14 21:59:43.10 -05:00" }, { T_STR, 0, "date" }, { T_STR, 0, "2002-12-14" }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; --- test: legacy Timestamps test spec: legacy D4 yaml: | canonical: 2001-12-15T02:59:43.00Z iso8601: 2001-02-28t21:59:43.00-05:00 spaced: 2001-12-14 21:59:43.00 -05:00 date: 2002-12-14 python: | [ { 'canonical': yaml.timestamp('2001-12-15T02:59:43.00Z'), 'iso8601': yaml.timestamp('2001-03-01T02:59:43.00Z'), 'spaced': yaml.timestamp('2001-12-15T02:59:43.00Z'), 'date': yaml.timestamp('2002-12-14T00:00:00.00Z') } ] ruby: | { 'canonical' => Time::utc( 2001, 12, 15, 2, 59, 43, 0 ), 'iso8601' => YAML::mktime( 2001, 2, 28, 21, 59, 43, 0, "-05:00" ), 'spaced' => YAML::mktime( 2001, 12, 14, 21, 59, 43, 0, "-05:00" ), 'date' => Date.new( 2002, 12, 14 ) } syck: | struct test_node map[] = { { T_STR, 0, "canonical" }, { T_STR, 0, "2001-12-15T02:59:43.00Z" }, { T_STR, 0, "iso8601" }, { T_STR, 0, "2001-02-28t21:59:43.00-05:00" }, { T_STR, 0, "spaced" }, { T_STR, 0, "2001-12-14 21:59:43.00 -05:00" }, { T_STR, 0, "date" }, { T_STR, 0, "2002-12-14" }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; --- test: Various explicit families spec: 2.23 yaml: | not-date: !str 2002-04-28 picture: !binary | R0lGODlhDAAMAIQAAP//9/X 17unp5WZmZgAAAOfn515eXv Pz7Y6OjuDg4J+fn5OTk6enp 56enmleECcgggoBADs= application specific tag: !!something | The semantics of the tag above may be different for different documents. ruby-setup: | YAML.add_private_type( "something" ) do |type, val| "SOMETHING: #{val}" end ruby: | { 'not-date' => '2002-04-28', 'picture' => "GIF89a\f\000\f\000\204\000\000\377\377\367\365\365\356\351\351\345fff\000\000\000\347\347\347^^^\363\363\355\216\216\216\340\340\340\237\237\237\223\223\223\247\247\247\236\236\236i^\020' \202\n\001\000;", 'application specific tag' => "SOMETHING: The semantics of the tag\nabove may be different for\ndifferent documents.\n" } syck: | struct test_node map[] = { { T_STR, 0, "not-date" }, { T_STR, "tag:yaml.org,2002:str", "2002-04-28" }, { T_STR, 0, "picture" }, { T_STR, "tag:yaml.org,2002:binary", "R0lGODlhDAAMAIQAAP//9/X\n17unp5WZmZgAAAOfn515eXv\nPz7Y6OjuDg4J+fn5OTk6enp\n56enmleECcgggoBADs=\n" }, { T_STR, 0, "application specific tag" }, { T_STR, "x-private:something", "The semantics of the tag\nabove may be different for\ndifferent documents.\n" }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; --- test: Application specific family spec: 2.24 yaml: | # Establish a tag prefix --- !clarkevans.com,2002/graph/^shape # Use the prefix: shorthand for # !clarkevans.com,2002/graph/circle - !^circle center: &ORIGIN {x: 73, 'y': 129} radius: 7 - !^line # !clarkevans.com,2002/graph/line start: *ORIGIN finish: { x: 89, 'y': 102 } - !^label start: *ORIGIN color: 0xFFEEBB value: Pretty vector drawing. ruby-setup: | YAML.add_domain_type( "clarkevans.com,2002", 'graph/shape' ) { |type, val| if Array === val val << "Shape Container" val else raise YAML::Error, "Invalid graph of class #{ val.class }: " + val.inspect end } one_shape_proc = Proc.new { |type, val| scheme, domain, type = type.split( /:/, 3 ) if val.is_a? 
::Hash val['TYPE'] = "Shape: #{type}" val else raise YAML::Error, "Invalid graph of class #{ val.class }: " + val.inspect end } YAML.add_domain_type( "clarkevans.com,2002", 'graph/circle', &one_shape_proc ) YAML.add_domain_type( "clarkevans.com,2002", 'graph/line', &one_shape_proc ) YAML.add_domain_type( "clarkevans.com,2002", 'graph/label', &one_shape_proc ) ruby: | [ { "radius" => 7, "center"=> { "x" => 73, "y" => 129 }, "TYPE" => "Shape: graph/circle" }, { "finish" => { "x" => 89, "y" => 102 }, "TYPE" => "Shape: graph/line", "start" => { "x" => 73, "y" => 129 } }, { "TYPE" => "Shape: graph/label", "value" => "Pretty vector drawing.", "start" => { "x" => 73, "y" => 129 }, "color" => 16772795 }, "Shape Container" ] syck: | struct test_node point1[] = { { T_STR, 0, "x" }, { T_STR, 0, "73" }, { T_STR, 0, "y" }, { T_STR, 0, "129" }, end_node }; struct test_node point2[] = { { T_STR, 0, "x" }, { T_STR, 0, "89" }, { T_STR, 0, "y" }, { T_STR, 0, "102" }, end_node }; struct test_node map1[] = { { T_STR, 0, "center" }, { T_MAP, 0, 0, point1 }, { T_STR, 0, "radius" }, { T_STR, 0, "7" }, end_node }; struct test_node map2[] = { { T_STR, 0, "start" }, { T_MAP, 0, 0, point1 }, { T_STR, 0, "finish" }, { T_MAP, 0, 0, point2 }, end_node }; struct test_node map3[] = { { T_STR, 0, "start" }, { T_MAP, 0, 0, point1 }, { T_STR, 0, "color" }, { T_STR, 0, "0xFFEEBB" }, { T_STR, 0, "value" }, { T_STR, 0, "Pretty vector drawing." }, end_node }; struct test_node seq[] = { { T_MAP, "tag:clarkevans.com,2002:graph/circle", 0, map1 }, { T_MAP, "tag:clarkevans.com,2002:graph/line", 0, map2 }, { T_MAP, "tag:clarkevans.com,2002:graph/label", 0, map3 }, end_node }; struct test_node stream[] = { { T_SEQ, "tag:clarkevans.com,2002:graph/shape", 0, seq }, end_node }; # --- # test: Unordered set # spec: 2.25 # yaml: | # # sets are represented as a # # mapping where each key is # # associated with the empty string # --- !set # ? Mark McGwire # ? Sammy Sosa # ? Ken Griff --- test: Ordered mappings spec: 2.26 yaml: | # ordered maps are represented as # a sequence of mappings, with # each mapping having one key --- !omap - Mark McGwire: 65 - Sammy Sosa: 63 - Ken Griffy: 58 ruby: | YAML::Omap[ 'Mark McGwire', 65, 'Sammy Sosa', 63, 'Ken Griffy', 58 ] syck: | struct test_node map1[] = { { T_STR, 0, "Mark McGwire" }, { T_STR, 0, "65" }, end_node }; struct test_node map2[] = { { T_STR, 0, "Sammy Sosa" }, { T_STR, 0, "63" }, end_node }; struct test_node map3[] = { { T_STR, 0, "Ken Griffy" }, { T_STR, 0, "58" }, end_node }; struct test_node seq[] = { { T_MAP, 0, 0, map1 }, { T_MAP, 0, 0, map2 }, { T_MAP, 0, 0, map3 }, end_node }; struct test_node stream[] = { { T_SEQ, "tag:yaml.org,2002:omap", 0, seq }, end_node }; --- test: Invoice spec: 2.27 yaml: | --- !clarkevans.com,2002/^invoice invoice: 34843 date : 2001-01-23 bill-to: &id001 given : Chris family : Dumars address: lines: | 458 Walkman Dr. Suite #292 city : Royal Oak state : MI postal : 48046 ship-to: *id001 product: - sku : BL394D quantity : 4 description : Basketball price : 450.00 - sku : BL4438H quantity : 1 description : Super Hoop price : 2392.00 tax : 251.42 total: 4443.52 comments: > Late afternoon is best. Backup contact is Nancy Billsmer @ 338-4338. 
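The invoice document above reuses the &id001 anchor for both bill-to and ship-to, so a conforming loader hands back the very same Hash for both keys rather than an equal copy. A minimal sketch of verifying that, assuming a current Ruby whose yaml standard library is Psych 4 or later (not the Syck build this suite targets), where aliases must be enabled explicitly:

    require 'yaml'

    invoice = YAML.load(<<~YML, aliases: true)
      bill-to: &id001
        given: Chris
        family: Dumars
      ship-to: *id001
    YML

    # The *id001 alias resolves to the same object, not a copy.
    p invoice['bill-to'].equal?(invoice['ship-to'])   # => true
    p invoice['ship-to']['given']                     # => "Chris"
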
ruby-setup: | YAML.add_domain_type( "clarkevans.com,2002", "invoice" ) { |type, val| val } id001 = { 'given' => 'Chris', 'family' => 'Dumars', 'address' => { 'lines' => "458 Walkman Dr.\nSuite #292\n", 'city' => 'Royal Oak', 'state' => 'MI', 'postal' => 48046 } } ruby: | { 'invoice' => 34843, 'date' => Date.new( 2001, 1, 23 ), 'bill-to' => id001, 'ship-to' => id001, 'product' => [ { 'sku' => 'BL394D', 'quantity' => 4, 'description' => 'Basketball', 'price' => 450.00 }, { 'sku' => 'BL4438H', 'quantity' => 1, 'description' => 'Super Hoop', 'price' => 2392.00 } ], 'tax' => 251.42, 'total' => 4443.52, 'comments' => "Late afternoon is best. Backup contact is Nancy Billsmer @ 338-4338.\n" } syck: | struct test_node prod1[] = { { T_STR, 0, "sku" }, { T_STR, 0, "BL394D" }, { T_STR, 0, "quantity" }, { T_STR, 0, "4" }, { T_STR, 0, "description" }, { T_STR, 0, "Basketball" }, { T_STR, 0, "price" }, { T_STR, 0, "450.00" }, end_node }; struct test_node prod2[] = { { T_STR, 0, "sku" }, { T_STR, 0, "BL4438H" }, { T_STR, 0, "quantity" }, { T_STR, 0, "1" }, { T_STR, 0, "description" }, { T_STR, 0, "Super Hoop" }, { T_STR, 0, "price" }, { T_STR, 0, "2392.00" }, end_node }; struct test_node products[] = { { T_MAP, 0, 0, prod1 }, { T_MAP, 0, 0, prod2 }, end_node }; struct test_node address[] = { { T_STR, 0, "lines" }, { T_STR, 0, "458 Walkman Dr.\nSuite #292\n" }, { T_STR, 0, "city" }, { T_STR, 0, "Royal Oak" }, { T_STR, 0, "state" }, { T_STR, 0, "MI" }, { T_STR, 0, "postal" }, { T_STR, 0, "48046" }, end_node }; struct test_node id001[] = { { T_STR, 0, "given" }, { T_STR, 0, "Chris" }, { T_STR, 0, "family" }, { T_STR, 0, "Dumars" }, { T_STR, 0, "address" }, { T_MAP, 0, 0, address }, end_node }; struct test_node map[] = { { T_STR, 0, "invoice" }, { T_STR, 0, "34843" }, { T_STR, 0, "date" }, { T_STR, 0, "2001-01-23" }, { T_STR, 0, "bill-to" }, { T_MAP, 0, 0, id001 }, { T_STR, 0, "ship-to" }, { T_MAP, 0, 0, id001 }, { T_STR, 0, "product" }, { T_SEQ, 0, 0, products }, { T_STR, 0, "tax" }, { T_STR, 0, "251.42" }, { T_STR, 0, "total" }, { T_STR, 0, "4443.52" }, { T_STR, 0, "comments" }, { T_STR, 0, "Late afternoon is best. Backup contact is Nancy Billsmer @ 338-4338.\n" }, end_node }; struct test_node stream[] = { { T_MAP, "tag:clarkevans.com,2002:invoice", 0, map }, end_node }; --- test: Log file spec: 2.28 yaml: | --- Time: 2001-11-23 15:01:42 -05:00 User: ed Warning: > This is an error message for the log file --- Time: 2001-11-23 15:02:31 -05:00 User: ed Warning: > A slightly different error message. 
--- Date: 2001-11-23 15:03:17 -05:00 User: ed Fatal: > Unknown variable "bar" Stack: - file: TopClass.py line: 23 code: | x = MoreObject("345\n") - file: MoreClass.py line: 58 code: |- foo = bar ruby: | y = YAML::Stream.new y.add( { 'Time' => YAML::mktime( 2001, 11, 23, 15, 01, 42, 00, "-05:00" ), 'User' => 'ed', 'Warning' => "This is an error message for the log file\n" } ) y.add( { 'Time' => YAML::mktime( 2001, 11, 23, 15, 02, 31, 00, "-05:00" ), 'User' => 'ed', 'Warning' => "A slightly different error message.\n" } ) y.add( { 'Date' => YAML::mktime( 2001, 11, 23, 15, 03, 17, 00, "-05:00" ), 'User' => 'ed', 'Fatal' => "Unknown variable \"bar\"\n", 'Stack' => [ { 'file' => 'TopClass.py', 'line' => 23, 'code' => "x = MoreObject(\"345\\n\")\n" }, { 'file' => 'MoreClass.py', 'line' => 58, 'code' => "foo = bar" } ] } ) syck: | struct test_node map1[] = { { T_STR, 0, "Time" }, { T_STR, 0, "2001-11-23 15:01:42 -05:00" }, { T_STR, 0, "User" }, { T_STR, 0, "ed" }, { T_STR, 0, "Warning" }, { T_STR, 0, "This is an error message for the log file\n" }, end_node }; struct test_node map2[] = { { T_STR, 0, "Time" }, { T_STR, 0, "2001-11-23 15:02:31 -05:00" }, { T_STR, 0, "User" }, { T_STR, 0, "ed" }, { T_STR, 0, "Warning" }, { T_STR, 0, "A slightly different error message.\n" }, end_node }; struct test_node file1[] = { { T_STR, 0, "file" }, { T_STR, 0, "TopClass.py" }, { T_STR, 0, "line" }, { T_STR, 0, "23" }, { T_STR, 0, "code" }, { T_STR, 0, "x = MoreObject(\"345\\n\")\n" }, end_node }; struct test_node file2[] = { { T_STR, 0, "file" }, { T_STR, 0, "MoreClass.py" }, { T_STR, 0, "line" }, { T_STR, 0, "58" }, { T_STR, 0, "code" }, { T_STR, 0, "foo = bar" }, end_node }; struct test_node stack[] = { { T_MAP, 0, 0, file1 }, { T_MAP, 0, 0, file2 }, end_node }; struct test_node map3[] = { { T_STR, 0, "Date" }, { T_STR, 0, "2001-11-23 15:03:17 -05:00" }, { T_STR, 0, "User" }, { T_STR, 0, "ed" }, { T_STR, 0, "Fatal" }, { T_STR, 0, "Unknown variable \"bar\"\n" }, { T_STR, 0, "Stack" }, { T_SEQ, 0, 0, stack }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map1 }, { T_MAP, 0, 0, map2 }, { T_MAP, 0, 0, map3 }, end_node }; documents: 3 --- test: Throwaway comments yaml: | ### These are four throwaway comment ### ### lines (the second line is empty). ### this: | # Comments may trail lines. contains three lines of text. The third one starts with a # character. This isn't a comment. # These are three throwaway comment # lines (the first line is empty). ruby: | { 'this' => "contains three lines of text.\nThe third one starts with a\n# character. This isn't a comment.\n" } syck: | struct test_node map[] = { { T_STR, 0, "this" }, { T_STR, 0, "contains three lines of text.\nThe third one starts with a\n# character. This isn't a comment.\n" }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; --- test: Document with a single value yaml: | --- > This YAML stream contains a single text value. The next stream is a log file - a sequence of log entries. Adding an entry to the log is a simple matter of appending it at the end. ruby: | "This YAML stream contains a single text value. The next stream is a log file - a sequence of log entries. Adding an entry to the log is a simple matter of appending it at the end.\n" syck: | struct test_node stream[] = { { T_STR, 0, "This YAML stream contains a single text value. The next stream is a log file - a sequence of log entries. 
Adding an entry to the log is a simple matter of appending it at the end.\n" }, end_node }; --- test: Document stream yaml: | --- at: 2001-08-12 09:25:00.00 Z type: GET HTTP: '1.0' url: '/index.html' --- at: 2001-08-12 09:25:10.00 Z type: GET HTTP: '1.0' url: '/toc.html' ruby: | y = YAML::Stream.new y.add( { 'at' => Time::utc( 2001, 8, 12, 9, 25, 00 ), 'type' => 'GET', 'HTTP' => '1.0', 'url' => '/index.html' } ) y.add( { 'at' => Time::utc( 2001, 8, 12, 9, 25, 10 ), 'type' => 'GET', 'HTTP' => '1.0', 'url' => '/toc.html' } ) syck: | struct test_node map1[] = { { T_STR, 0, "at" }, { T_STR, 0, "2001-08-12 09:25:00.00 Z" }, { T_STR, 0, "type" }, { T_STR, 0, "GET" }, { T_STR, 0, "HTTP" }, { T_STR, 0, "1.0" }, { T_STR, 0, "url" }, { T_STR, 0, "/index.html" }, end_node }; struct test_node map2[] = { { T_STR, 0, "at" }, { T_STR, 0, "2001-08-12 09:25:10.00 Z" }, { T_STR, 0, "type" }, { T_STR, 0, "GET" }, { T_STR, 0, "HTTP" }, { T_STR, 0, "1.0" }, { T_STR, 0, "url" }, { T_STR, 0, "/toc.html" }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map1 }, { T_MAP, 0, 0, map2 }, end_node }; documents: 2 --- test: Top level mapping yaml: | # This stream is an example of a top-level mapping. invoice : 34843 date : 2001-01-23 total : 4443.52 ruby: | { 'invoice' => 34843, 'date' => Date.new( 2001, 1, 23 ), 'total' => 4443.52 } syck: | struct test_node map[] = { { T_STR, 0, "invoice" }, { T_STR, 0, "34843" }, { T_STR, 0, "date" }, { T_STR, 0, "2001-01-23" }, { T_STR, 0, "total" }, { T_STR, 0, "4443.52" }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; --- test: Single-line documents yaml: | # The following is a sequence of three documents. # The first contains an empty mapping, the second # an empty sequence, and the last an empty string. --- {} --- [ ] --- '' ruby: | y = YAML::Stream.new y.add( {} ) y.add( [] ) y.add( '' ) syck: | struct test_node map[] = { end_node }; struct test_node seq[] = { end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, { T_SEQ, 0, 0, seq }, { T_STR, 0, "" }, end_node }; documents: 3 --- test: Document with pause yaml: | # A communication channel based on a YAML stream. --- sent at: 2002-06-06 11:46:25.10 Z payload: Whatever # Receiver can process this as soon as the following is sent: ... # Even if the next message is sent long after: --- sent at: 2002-06-06 12:05:53.47 Z payload: Whatever ... ruby: | y = YAML::Stream.new y.add( { 'sent at' => YAML::mktime( 2002, 6, 6, 11, 46, 25, 0.10 ), 'payload' => 'Whatever' } ) y.add( { "payload" => "Whatever", "sent at" => YAML::mktime( 2002, 6, 6, 12, 5, 53, 0.47 ) } ) syck: | struct test_node map1[] = { { T_STR, 0, "sent at" }, { T_STR, 0, "2002-06-06 11:46:25.10 Z" }, { T_STR, 0, "payload" }, { T_STR, 0, "Whatever" }, end_node }; struct test_node map2[] = { { T_STR, 0, "sent at" }, { T_STR, 0, "2002-06-06 12:05:53.47 Z" }, { T_STR, 0, "payload" }, { T_STR, 0, "Whatever" }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map1 }, { T_MAP, 0, 0, map2 }, end_node }; documents: 2 --- test: Explicit typing yaml: | integer: 12 also int: ! 
"12" string: !str 12 ruby: | { 'integer' => 12, 'also int' => 12, 'string' => '12' } syck: | struct test_node map[] = { { T_STR, 0, "integer" }, { T_STR, "tag:yaml.org,2002:int", "12" }, { T_STR, 0, "also int" }, { T_STR, "tag:yaml.org,2002:int", "12" }, { T_STR, 0, "string" }, { T_STR, "tag:yaml.org,2002:str", "12" }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; --- test: Private types yaml: | # Both examples below make use of the 'x-private:ball' # type family URI, but with different semantics. --- pool: !!ball number: 8 color: black --- bearing: !!ball material: steel ruby: | y = YAML::Stream.new y.add( { 'pool' => YAML::PrivateType.new( 'ball', { 'number' => 8, 'color' => 'black' } ) } ) y.add( { 'bearing' => YAML::PrivateType.new( 'ball', { 'material' => 'steel' } ) } ) syck: | struct test_node pool[] = { { T_STR, 0, "number" }, { T_STR, 0, "8" }, { T_STR, 0, "color" }, { T_STR, 0, "black" }, end_node }; struct test_node map1[] = { { T_STR, 0, "pool" }, { T_MAP, "x-private:ball", 0, pool }, end_node }; struct test_node bearing[] = { { T_STR, 0, "material" }, { T_STR, 0, "steel" }, end_node }; struct test_node map2[] = { { T_STR, 0, "bearing" }, { T_MAP, "x-private:ball", 0, bearing }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map1 }, { T_MAP, 0, 0, map2 }, end_node }; documents: 2 --- test: Type family under yaml.org yaml: | # The URI is 'tag:yaml.org,2002:str' - !str a Unicode string python: | [ [ 'a Unicode string' ] ] ruby: | [ 'a Unicode string' ] syck: | struct test_node seq[] = { { T_STR, "tag:yaml.org,2002:str", "a Unicode string" }, end_node }; struct test_node stream[] = { { T_SEQ, 0, 0, seq }, end_node }; --- test: Type family under perl.yaml.org yaml: | # The URI is 'tag:perl.yaml.org,2002:Text::Tabs' - !perl/Text::Tabs {} ruby: | [ YAML::DomainType.new( 'perl.yaml.org,2002', 'Text::Tabs', {} ) ] syck: | struct test_node map[] = { end_node }; struct test_node seq[] = { { T_MAP, "tag:perl.yaml.org,2002:Text::Tabs", 0, map }, end_node }; struct test_node stream[] = { { T_SEQ, 0, 0, seq }, end_node }; --- test: Type family under clarkevans.com yaml: | # The URI is 'tag:clarkevans.com,2003-02:timesheet' - !clarkevans.com,2003-02/timesheet {} ruby: | [ YAML::DomainType.new( 'clarkevans.com,2003-02', 'timesheet', {} ) ] syck: | struct test_node map[] = { end_node }; struct test_node seq[] = { { T_MAP, "tag:clarkevans.com,2003-02:timesheet", 0, map }, end_node }; struct test_node stream[] = { { T_SEQ, 0, 0, seq }, end_node }; --- test: URI Escaping yaml: | same: - !domain.tld,2002/type\x30 value - !domain.tld,2002/type0 value different: # As far as the YAML parser is concerned - !domain.tld,2002/type%30 value - !domain.tld,2002/type0 value ruby-setup: | YAML.add_domain_type( "domain.tld,2002", "type0" ) { |type, val| "ONE: #{val}" } YAML.add_domain_type( "domain.tld,2002", "type%30" ) { |type, val| "TWO: #{val}" } ruby: | { 'same' => [ 'ONE: value', 'ONE: value' ], 'different' => [ 'TWO: value', 'ONE: value' ] } syck: | struct test_node same[] = { { T_STR, "tag:domain.tld,2002:type0", "value" }, { T_STR, "tag:domain.tld,2002:type0", "value" }, end_node }; struct test_node diff[] = { { T_STR, "tag:domain.tld,2002:type%30", "value" }, { T_STR, "tag:domain.tld,2002:type0", "value" }, end_node }; struct test_node map[] = { { T_STR, 0, "same" }, { T_SEQ, 0, 0, same }, { T_STR, 0, "different" }, { T_SEQ, 0, 0, diff }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; --- test: URI Prefixing yaml: | # 
'tag:domain.tld,2002:invoice' is some type family. invoice: !domain.tld,2002/^invoice # 'seq' is shorthand for 'tag:yaml.org,2002:seq'. # This does not effect '^customer' below # because it is does not specify a prefix. customers: !seq # '^customer' is shorthand for the full # notation 'tag:domain.tld,2002:customer'. - !^customer given : Chris family : Dumars ruby-setup: | YAML.add_domain_type( "domain.tld,2002", /(invoice|customer)/ ) { |type, val| if val.is_a? ::Hash scheme, domain, type = type.split( /:/, 3 ) val['type'] = "domain #{type}" val else raise YAML::Error, "Not a Hash in domain.tld/invoice: " + val.inspect end } ruby: | { "invoice"=> { "customers"=> [ { "given"=>"Chris", "type"=>"domain customer", "family"=>"Dumars" } ], "type"=>"domain invoice" } } --- test: Overriding anchors yaml: | anchor : &A001 This scalar has an anchor. override : &A001 > The alias node below is a repeated use of this value. alias : *A001 ruby: | { 'anchor' => 'This scalar has an anchor.', 'override' => "The alias node below is a repeated use of this value.\n", 'alias' => "The alias node below is a repeated use of this value.\n" } syck: | struct test_node map[] = { { T_STR, 0, "anchor" }, { T_STR, 0, "This scalar has an anchor." }, { T_STR, 0, "override" }, { T_STR, 0, "The alias node below is a repeated use of this value.\n" }, { T_STR, 0, "alias" }, { T_STR, 0, "The alias node below is a repeated use of this value.\n" }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; --- test: Flow and block formatting yaml: | empty: [] flow: [ one, two, three # May span lines, , four, # indentation is five ] # mostly ignored. block: - First item in top sequence - - Subordinate sequence entry - > A folded sequence entry - Sixth item in top sequence ruby: | { 'empty' => [], 'flow' => [ 'one', 'two', 'three', 'four', 'five' ], 'block' => [ 'First item in top sequence', [ 'Subordinate sequence entry' ], "A folded sequence entry\n", 'Sixth item in top sequence' ] } syck: | struct test_node empty[] = { end_node }; struct test_node flow[] = { { T_STR, 0, "one" }, { T_STR, 0, "two" }, { T_STR, 0, "three" }, { T_STR, 0, "four" }, { T_STR, 0, "five" }, end_node }; struct test_node inblock[] = { { T_STR, 0, "Subordinate sequence entry" }, end_node }; struct test_node block[] = { { T_STR, 0, "First item in top sequence" }, { T_SEQ, 0, 0, inblock }, { T_STR, 0, "A folded sequence entry\n" }, { T_STR, 0, "Sixth item in top sequence" }, end_node }; struct test_node map[] = { { T_STR, 0, "empty" }, { T_SEQ, 0, 0, empty }, { T_STR, 0, "flow" }, { T_SEQ, 0, 0, flow }, { T_STR, 0, "block" }, { T_SEQ, 0, 0, block }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; --- test: Complete mapping test yaml: | empty: {} flow: { one: 1, two: 2 } spanning: { one: 1, two: 2 } block: first : First entry second: key: Subordinate mapping third: - Subordinate sequence - { } - Previous mapping is empty. - A key: value pair in a sequence. A second: key:value pair. - The previous entry is equal to the following one. - A key: value pair in a sequence. A second: key:value pair. !float 12 : This key is a float. ? > ? : This key had to be protected. "\a" : This key had to be escaped. ? > This is a multi-line folded key : Whose value is also multi-line. ? this also works as a key : with a value at the next line. ? - This key - is a sequence : - With a sequence value. ? This: key is a: mapping : with a: mapping value. 
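The '?' explicit-key entries above produce non-scalar keys; in Ruby that simply means an Array (or Hash) ends up as a Hash key. A rough sketch of the sequence-key case, assuming a current Psych-based yaml library:

    require 'yaml'

    doc = YAML.load(<<~YML)
      ? - This key
        - is a sequence
      : - With a sequence value.
    YML

    # The explicit key loads as an Array, used directly as the Hash key.
    p doc.keys.first                        # => ["This key", "is a sequence"]
    p doc[['This key', 'is a sequence']]    # => ["With a sequence value."]
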
ruby: | { 'empty' => {}, 'flow' => { 'one' => 1, 'two' => 2 }, 'spanning' => { 'one' => 1, 'two' => 2 }, 'block' => { 'first' => 'First entry', 'second' => { 'key' => 'Subordinate mapping' }, 'third' => [ 'Subordinate sequence', {}, 'Previous mapping is empty.', { 'A key' => 'value pair in a sequence.', 'A second' => 'key:value pair.' }, 'The previous entry is equal to the following one.', { 'A key' => 'value pair in a sequence.', 'A second' => 'key:value pair.' } ], 12.0 => 'This key is a float.', "?\n" => 'This key had to be protected.', "\a" => 'This key had to be escaped.', "This is a multi-line folded key\n" => "Whose value is also multi-line.", 'this also works as a key' => 'with a value at the next line.', [ 'This key', 'is a sequence' ] => [ 'With a sequence value.' ] } } # Couldn't recreate map exactly, so we'll do a detailed check to be sure it's entact obj_y['block'].keys.each { |k| if Hash === k v = obj_y['block'][k] if k['This'] == 'key' and k['is a'] == 'mapping' and v['with a'] == 'mapping value.' obj_r['block'][k] = v end end } --- test: Literal explicit indentation yaml: | # Explicit indentation must # be given in all the three # following cases. leading spaces: |2 This value starts with four spaces. leading line break: |2 This value starts with a line break. leading comment indicator: |2 # first line starts with a # character. # Explicit indentation may # also be given when it is # not required. redundant: |2 This value is indented 2 spaces. ruby: | { 'leading spaces' => " This value starts with four spaces.\n", 'leading line break' => "\nThis value starts with a line break.\n", 'leading comment indicator' => "# first line starts with a\n# character.\n", 'redundant' => "This value is indented 2 spaces.\n" } --- test: Chomping and keep modifiers yaml: | clipped: | This has one newline. same as "clipped" above: "This has one newline.\n" stripped: |- This has no newline. same as "stripped" above: "This has no newline." kept: |+ This has two newlines. same as "kept" above: "This has two newlines.\n\n" ruby: | { 'clipped' => "This has one newline.\n", 'same as "clipped" above' => "This has one newline.\n", 'stripped' => 'This has no newline.', 'same as "stripped" above' => 'This has no newline.', 'kept' => "This has two newlines.\n\n", 'same as "kept" above' => "This has two newlines.\n\n" } --- test: Literal combinations yaml: | empty: | literal: | The \ ' " characters may be freely used. Leading white space is significant. Line breaks are significant. Thus this value contains one empty line and ends with a single line break, but does not start with one. is equal to: "The \\ ' \" characters may \ be\nfreely used. Leading white\n space \ is significant.\n\nLine breaks are \ significant.\nThus this value contains \ one\nempty line and ends with a\nsingle \ line break, but does\nnot start with one.\n" # Comments may follow a block # scalar value. They must be # less indented. # Modifiers may be combined in any order. indented and chomped: |2- This has no newline. also written as: |-2 This has no newline. both are equal to: " This has no newline." ruby: | { 'empty' => '', 'literal' => "The \\ ' \" characters may be\nfreely used. Leading white\n space " + "is significant.\n\nLine breaks are significant.\nThus this value contains one\n" + "empty line and ends with a\nsingle line break, but does\nnot start with one.\n", 'is equal to' => "The \\ ' \" characters may be\nfreely used. 
Leading white\n space " + "is significant.\n\nLine breaks are significant.\nThus this value contains one\n" + "empty line and ends with a\nsingle line break, but does\nnot start with one.\n", 'indented and chomped' => ' This has no newline.', 'also written as' => ' This has no newline.', 'both are equal to' => ' This has no newline.' } --- test: Folded combinations yaml: | empty: > one paragraph: > Line feeds are converted to spaces, so this value contains no line breaks except for the final one. multiple paragraphs: >2 An empty line, either at the start or in the value: Is interpreted as a line break. Thus this value contains three line breaks. indented text: > This is a folded paragraph followed by a list: * first entry * second entry Followed by another folded paragraph, another list: * first entry * second entry And a final folded paragraph. above is equal to: | This is a folded paragraph followed by a list: * first entry * second entry Followed by another folded paragraph, another list: * first entry * second entry And a final folded paragraph. # Explicit comments may follow # but must be less indented. ruby: | { 'empty' => '', 'one paragraph' => 'Line feeds are converted to spaces, so this value' + " contains no line breaks except for the final one.\n", 'multiple paragraphs' => "\nAn empty line, either at the start or in the value:\n" + "Is interpreted as a line break. Thus this value contains three line breaks.\n", 'indented text' => "This is a folded paragraph followed by a list:\n" + " * first entry\n * second entry\nFollowed by another folded paragraph, " + "another list:\n\n * first entry\n\n * second entry\n\nAnd a final folded paragraph.\n", 'above is equal to' => "This is a folded paragraph followed by a list:\n" + " * first entry\n * second entry\nFollowed by another folded paragraph, " + "another list:\n\n * first entry\n\n * second entry\n\nAnd a final folded paragraph.\n" } --- test: Single quotes yaml: | empty: '' second: '! : \ etc. can be used freely.' third: 'a single quote '' must be escaped.' span: 'this contains six spaces and one line break' is same as: "this contains six spaces\nand one line break" ruby: | { 'empty' => '', 'second' => '! : \\ etc. can be used freely.', 'third' => "a single quote ' must be escaped.", 'span' => "this contains six spaces\nand one line break", 'is same as' => "this contains six spaces\nand one line break" } --- test: Double quotes yaml: | empty: "" second: "! : etc. can be used freely." third: "a \" or a \\ must be escaped." fourth: "this value ends with an LF.\n" span: "this contains four \ spaces" is equal to: "this contains four spaces" ruby: | { 'empty' => '', 'second' => '! : etc. can be used freely.', 'third' => 'a " or a \\ must be escaped.', 'fourth' => "this value ends with an LF.\n", 'span' => "this contains four spaces", 'is equal to' => "this contains four spaces" } --- test: Unquoted strings yaml: | first: There is no unquoted empty string. second: 12 ## This is an integer. third: !str 12 ## This is a string. span: this contains six spaces and one line break indicators: this has no comments. #:foo and bar# are both text. flow: [ can span lines, # comment like this ] note: { one-line keys: but multi-line values } ruby: | { 'first' => 'There is no unquoted empty string.', 'second' => 12, 'third' => '12', 'span' => "this contains six spaces\nand one line break", 'indicators' => "this has no comments. 
#:foo and bar# are both text.", 'flow' => [ 'can span lines', 'like this' ], 'note' => { 'one-line keys' => 'but multi-line values' } } --- test: Spanning sequences yaml: | # The following are equal seqs # with different identities. flow: [ one, two ] spanning: [ one, two ] block: - one - two ruby: | { 'flow' => [ 'one', 'two' ], 'spanning' => [ 'one', 'two' ], 'block' => [ 'one', 'two' ] } --- test: Flow mappings yaml: | # The following are equal maps # with different identities. flow: { one: 1, two: 2 } block: one: 1 two: 2 ruby: | { 'flow' => { 'one' => 1, 'two' => 2 }, 'block' => { 'one' => 1, 'two' => 2 } } --- test: Representations of 12 yaml: | - 12 # An integer # The following scalars # are loaded to the # string value '1' '2'. - !str 12 - '12' - "12" - "\ 1\ 2\ " # Strings containing paths and regexps can be unquoted: - /foo/bar - d:/foo/bar - foo/bar - /a.*b/ ruby: | [ 12, '12', '12', '12', '12', '/foo/bar', 'd:/foo/bar', 'foo/bar', '/a.*b/' ] --- test: "Null" yaml: | canonical: ~ english: null # This sequence has five # entries, two with values. sparse: - ~ - 2nd entry - Null - 4th entry - four: This mapping has five keys, only two with values. ruby: | { 'canonical' => nil, 'english' => nil, 'sparse' => [ nil, '2nd entry', nil, '4th entry', nil ], 'four' => 'This mapping has five keys, only two with values.' } --- test: Omap yaml: | # Explicitly typed dictionary. Bestiary: !omap - aardvark: African pig-like ant eater. Ugly. - anteater: South-American ant eater. Two species. - anaconda: South-American constrictor snake. Scary. # Etc. ruby: | { 'Bestiary' => YAML::Omap[ 'aardvark', 'African pig-like ant eater. Ugly.', 'anteater', 'South-American ant eater. Two species.', 'anaconda', 'South-American constrictor snake. Scary.' ] } --- test: Pairs yaml: | # Explicitly typed pairs. tasks: !pairs - meeting: with team. - meeting: with boss. - break: lunch. - meeting: with client. ruby: | { 'tasks' => YAML::Pairs[ 'meeting', 'with team.', 'meeting', 'with boss.', 'break', 'lunch.', 'meeting', 'with client.' ] } --- test: Set yaml: | # Explicitly typed set. baseball players: !set Mark McGwire: Sammy Sosa: Ken Griffey: ruby: | { 'baseball players' => YAML::Set[ 'Mark McGwire', nil, 'Sammy Sosa', nil, 'Ken Griffey', nil ] } --- test: Boolean yaml: | false: used as key logical: true answer: no ruby: | { false => 'used as key', 'logical' => true, 'answer' => false } --- test: Integer yaml: | canonical: 12345 decimal: +12,345 octal: 014 hexadecimal: 0xC ruby: | { 'canonical' => 12345, 'decimal' => 12345, 'octal' => 12, 'hexadecimal' => 12 } --- test: Float yaml: | canonical: 1.23015e+3 exponential: 12.3015e+02 fixed: 1,230.15 negative infinity: -.inf not a number: .NaN ruby: | { 'canonical' => 1230.15, 'exponential' => 1230.15, 'fixed' => 1230.15, 'negative infinity' => -1.0/0.0, 'not a number' => 0.0/0.0 } if obj_y['not a number'].nan? 
# NaN comparison doesn't work right against 0.0/0.0 obj_r['not a number'] = obj_y['not a number'] end --- test: Timestamp yaml: | canonical: 2001-12-15T02:59:43.1Z valid iso8601: 2001-12-14t21:59:43.10-05:00 space separated: 2001-12-14 21:59:43.10 -05:00 date (noon UTC): 2002-12-14 ruby: | { 'canonical' => YAML::mktime( 2001, 12, 15, 2, 59, 43, 0.10 ), 'valid iso8601' => YAML::mktime( 2001, 12, 14, 21, 59, 43, 0.10, "-05:00" ), 'space separated' => YAML::mktime( 2001, 12, 14, 21, 59, 43, 0.10, "-05:00" ), 'date (noon UTC)' => Date.new( 2002, 12, 14 ) } syck: | struct test_node map[] = { { T_STR, 0, "canonical" }, { T_STR, 0, "2001-12-15T02:59:43.1Z" }, { T_STR, 0, "valid iso8601" }, { T_STR, 0, "2001-12-14t21:59:43.10-05:00" }, { T_STR, 0, "space separated" }, { T_STR, 0, "2001-12-14 21:59:43.10 -05:00" }, { T_STR, 0, "date (noon UTC)" }, { T_STR, 0, "2002-12-14" }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; --- test: Binary yaml: | canonical: !binary "\ R0lGODlhDAAMAIQAAP//9/X17unp5WZmZgAAAOfn515eXvPz7Y6OjuDg4J+fn5\ OTk6enp56enmlpaWNjY6Ojo4SEhP/++f/++f/++f/++f/++f/++f/++f/++f/+\ +f/++f/++f/++f/++f/++SH+Dk1hZGUgd2l0aCBHSU1QACwAAAAADAAMAAAFLC\ AgjoEwnuNAFOhpEMTRiggcz4BNJHrv/zCFcLiwMWYNG84BwwEeECcgggoBADs=" base64: !binary | R0lGODlhDAAMAIQAAP//9/X17unp5WZmZgAAAOfn515eXvPz7Y6OjuDg4J+fn5 OTk6enp56enmlpaWNjY6Ojo4SEhP/++f/++f/++f/++f/++f/++f/++f/++f/+ +f/++f/++f/++f/++f/++SH+Dk1hZGUgd2l0aCBHSU1QACwAAAAADAAMAAAFLC AgjoEwnuNAFOhpEMTRiggcz4BNJHrv/zCFcLiwMWYNG84BwwEeECcgggoBADs= description: > The binary value above is a tiny arrow encoded as a gif image. ruby-setup: | arrow_gif = "GIF89a\f\000\f\000\204\000\000\377\377\367\365\365\356\351\351\345fff\000\000\000\347\347\347^^^\363\363\355\216\216\216\340\340\340\237\237\237\223\223\223\247\247\247\236\236\236iiiccc\243\243\243\204\204\204\377\376\371\377\376\371\377\376\371\377\376\371\377\376\371\377\376\371\377\376\371\377\376\371\377\376\371\377\376\371\377\376\371\377\376\371\377\376\371\377\376\371!\376\016Made with GIMP\000,\000\000\000\000\f\000\f\000\000\005, \216\2010\236\343@\024\350i\020\304\321\212\010\034\317\200M$z\357\3770\205p\270\2601f\r\e\316\001\303\001\036\020' \202\n\001\000;" ruby: | { 'canonical' => arrow_gif, 'base64' => arrow_gif, 'description' => "The binary value above is a tiny arrow encoded as a gif image.\n" } --- test: Merge key yaml: | --- - &CENTER { x: 1, y: 2 } - &LEFT { x: 0, y: 2 } - &BIG { r: 10 } - &SMALL { r: 1 } # All the following maps are equal: - # Explicit keys x: 1 y: 2 r: 10 label: center/big - # Merge one map << : *CENTER r: 10 label: center/big - # Merge multiple maps << : [ *CENTER, *BIG ] label: center/big - # Override << : [ *BIG, *LEFT, *SMALL ] x: 1 label: center/big ruby-setup: | center = { 'x' => 1, 'y' => 2 } left = { 'x' => 0, 'y' => 2 } big = { 'r' => 10 } small = { 'r' => 1 } node1 = { 'x' => 1, 'y' => 2, 'r' => 10, 'label' => 'center/big' } node2 = center.dup node2.update( { 'r' => 10, 'label' => 'center/big' } ) node3 = big.dup node3.update( center ) node3.update( { 'label' => 'center/big' } ) node4 = small.dup node4.update( left ) node4.update( big ) node4.update( { 'x' => 1, 'label' => 'center/big' } ) ruby: | [ center, left, big, small, node1, node2, node3, node4 ] --- test: Default key yaml: | --- # Old schema link with: - library1.dll - library2.dll --- # New schema link with: - = : library1.dll version: 1.2 - = : library2.dll version: 2.3 ruby: | y = YAML::Stream.new y.add( { 'link with' => [ 'library1.dll', 'library2.dll' ] } ) obj_h = Hash[ 
'version' => 1.2 ] obj_h.default = 'library1.dll' obj_h2 = Hash[ 'version' => 2.3 ] obj_h2.default = 'library2.dll' y.add( { 'link with' => [ obj_h, obj_h2 ] } ) documents: 2 --- test: Special keys yaml: | "!": These three keys "&": had to be quoted "=": and are normal strings. # NOTE: the following node should NOT be serialized this way. encoded node : !special '!' : '!type' !special|canonical '&' : 12 = : value # The proper way to serialize the above node is as follows: node : !!type &12 value ruby: | { '!' => 'These three keys', '&' => 'had to be quoted', '=' => 'and are normal strings.', 'encoded node' => YAML::PrivateType.new( 'type', 'value' ), 'node' => YAML::PrivateType.new( 'type', 'value' ) } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/yts/YtsAnchorAlias.yml0000644000000000000000000000226611672453175026077 0ustar rootroot--- %YAML:1.0 test: Simple Alias Example brief: > If you need to refer to the same item of data twice, you can give that item an alias. The alias is a plain string, starting with an ampersand. The item may then be referred to by the alias throughout your document by using an asterisk before the name of the alias. This is called an anchor. yaml: | - &showell Steve - Clark - Brian - Oren - *showell python: | [ [ 'Steve', 'Clark', 'Brian', 'Oren', 'Steve'] ] ruby-setup: | showell = 'Steve' ruby: | [ showell, 'Clark', 'Brian', 'Oren', showell ] --- test: Alias of a Mapping brief: > An alias can be used on any item of data, including sequences, mappings, and other complex data types. yaml: | - &hello Meat: pork Starch: potato - banana - *hello python: | [ [ {'Meat': 'pork', 'Starch': 'potato'}, 'banana', {'Meat': 'pork', 'Starch': 'potato'}, ] ] ruby-setup: | hello = { 'Meat' => 'pork', 'Starch' => 'potato' } ruby: | [ hello, 'banana', hello ] ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/yts/YtsYpath.yml0000644000000000000000000000563011672453175024776 0ustar rootrootdata: apple: red ypath: / expected: - / --- data: apple: red ypath: . expected: - / --- data: apple: red ypath: /* expected: - /apple --- data: apple: red lemon: yellow ypath: /* expected: - /apple - /lemon unordered: 1 --- data: fruit: banana: yellow vegetable: carrot: orange ypath: //. expected: - / - /fruit - /fruit/banana - /vegetable - /vegetable/carrot unordered: 1 --- data: one: two: xxx ypath: //two/.. expected: - /one --- data: apple: red ypath: /apple expected: - /apple --- data: apple: red lemon: yellow ypath: /"lemon" expected: - /lemon --- data: apple: red lemon: yellow ypath: /'lemon' expected: - /lemon --- data: apple: red lemon: yellow ypath: /lemon expected: - /lemon --- data: - apple - lemon ypath: /0 expected: - /0 --- data: apple: red lemon: yellow ypath: /orange expected: [] --- data: apple: red ypath: ./. 
expected: - / --- data: fruit: banana: yellow vegetable: carrot: orange ypath: /fruit/banana expected: - /fruit/banana --- data: fruit: banana: yellow vegetable: carrot: orange ypath: fruit/banana expected: - /fruit/banana --- data: names: - Steve Howell - Clark Evans ypath: /names/0 expected: - /names/0 --- data: names: - first: Clark last: Evans - first: Steve last: Howell ypath: /names/1/first expected: - /names/1/first --- data: names: - first: Clark last: Evans - first: Steve last: Howell ypath: /names/*/first expected: - /names/0/first - /names/1/first --- data: names: python-heads: - first: Clark last: Evans - first: Steve last: Howell perl-heads: - first: Brian last: Ingerson ypath: names//first expected: - /names/python-heads/0/first - /names/python-heads/1/first - /names/perl-heads/0/first --- data: task: - name: wake foo: bar - name: eat task: - name: veggies - name: meats - name: sleep ypath: //task expected: - /task - /task/1/task --- data: - one: name: xxx - two: name: yyy - three: name: zzz ypath: /*/one/name|//three/name expected: - /0/one/name - /2/three/name --- data: apple: red ypath: .|/apple|apple|/|. expected: - / - /apple --- data: - one: name: xxx - two: name: yyy - three: name: zzz ypath: /*/(one|three)/name expected: - /0/one/name - /2/three/name --- data: - one: xxx - two: yyy - one: zzz ypath: /*[one] expected: - /0 - /2 --- data: - food: Hamburger calories: 900 - food: Fries calories: 650 - food: Soft Drink calories: 350 ypath: //food[.=Fries] expected: - /1/food ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/yts/YtsFlowCollections.yml0000644000000000000000000000347011672453175027017 0ustar rootroot--- test: Simple Inline Array brief: > Sequences can be contained on a single line, using the inline syntax. Separate each entry with commas and enclose in square brackets. yaml: | --- seq: [ a, b, c ] python: | [ { 'seq': [ 'a', 'b', 'c' ] } ] ruby: | { 'seq' => [ 'a', 'b', 'c' ] } --- test: Simple Inline Hash brief: > Mapping can also be contained on a single line, using the inline syntax. Each key-value pair is separated by a colon, with a comma between each entry in the mapping. Enclose with curly braces. yaml: | --- hash: { name: Steve, foo: bar } python: | [ { 'hash': {'name': 'Steve', 'foo': 'bar'} } ] ruby: | { 'hash' => { 'name' => 'Steve', 'foo' => 'bar' } } --- test: Multi-line Inline Collections brief: > Both inline sequences and inline mappings can span multiple lines, provided that you indent the additional lines. yaml: | languages: [ Ruby, Perl, Python ] websites: { YAML: yaml.org, Ruby: ruby-lang.org, Python: python.org, Perl: use.perl.org } ruby: | { 'languages' => [ 'Ruby', 'Perl', 'Python' ], 'websites' => { 'YAML' => 'yaml.org', 'Ruby' => 'ruby-lang.org', 'Python' => 'python.org', 'Perl' => 'use.perl.org' } } --- test: Commas in Values brief: > List items in collections are delimited by commas, but there must be a space after each comma. This allows you to add numbers without quoting. yaml: | attendances: [ 45,123, 70,000, 17,222 ] python: | [ {'attendances': [ 45123, 70000, 17222 ]} ] ruby: | { 'attendances' => [ 45123, 70000, 17222 ] } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/yts/YtsFoldedScalars.yml0000644000000000000000000001154611672453175026422 0ustar rootroot--- %YAML:1.0 test: Single ending newline brief: > A pipe character, followed by an indented block of text is treated as a literal block, in which newlines are preserved throughout the block, including the final newline. 
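The behaviour described in this brief is easy to confirm from irb; a minimal sketch, assuming a stock Ruby with the yaml standard library:

    require 'yaml'

    doc = YAML.load("this: |\n  Foo\n  Bar\n")

    # A '|' literal block keeps every newline, including the final one.
    p doc['this']   # => "Foo\nBar\n"
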
yaml: | --- this: | Foo Bar ruby: | { 'this' => "Foo\nBar\n" } python: | [ { 'this': "Foo\nBar\n" } ] --- test: The '+' indicator brief: > The '+' indicator says to keep newlines at the end of text blocks. yaml: | normal: | extra new lines not kept preserving: |+ extra new lines are kept dummy: value ruby: | { 'normal' => "extra new lines not kept\n", 'preserving' => "extra new lines are kept\n\n\n", 'dummy' => 'value' } python: | [ { 'normal': "extra new lines not kept\n", 'preserving': "extra new lines are kept\n\n\n", 'dummy': 'value' } ] --- test: Three trailing newlines in literals brief: > To give you more control over how space is preserved in text blocks, YAML has the keep '+' and chomp '-' indicators. The keep indicator will preserve all ending newlines, while the chomp indicator will strip all ending newlines. yaml: | clipped: | This has one newline. same as "clipped" above: "This has one newline.\n" stripped: |- This has no newline. same as "stripped" above: "This has no newline." kept: |+ This has four newlines. same as "kept" above: "This has four newlines.\n\n\n\n" ruby: | { 'clipped' => "This has one newline.\n", 'same as "clipped" above' => "This has one newline.\n", 'stripped' => 'This has no newline.', 'same as "stripped" above' => 'This has no newline.', 'kept' => "This has four newlines.\n\n\n\n", 'same as "kept" above' => "This has four newlines.\n\n\n\n" } not_yet_in_python: | [ { 'clipped': "This has one newline.\n", 'same as "clipped" above': "This has one newline.\n", 'stripped': 'This has no newline.', 'same as "stripped" above': 'This has no newline.', 'kept': "This has four newlines.\n\n\n\n", 'same as "kept" above': "This has four newlines.\n\n\n\n" } ] --- test: Extra trailing newlines with spaces brief: > Normally, only a single newline is kept from the end of a literal block, unless the keep '+' character is used in combination with the pipe. The following example will preserve all ending whitespace since the last line of both literal blocks contains spaces which extend past the indentation level. yaml: | --- this: | Foo kept: |+ Foo ruby: | { 'this' => "Foo\n\n \n", 'kept' => "Foo\n\n \n" } --- test: Folded Block in a Sequence brief: > A greater-then character, followed by an indented block of text is treated as a folded block, in which lines of text separated by a single newline are concatenated as a single line. yaml: | --- - apple - banana - > can't you see the beauty of yaml? hmm - dog python: | [ [ 'apple', 'banana', "can't you see the beauty of yaml? hmm\n", 'dog' ] ] ruby: | [ 'apple', 'banana', "can't you see the beauty of yaml? hmm\n", 'dog' ] --- test: Folded Block as a Mapping Value brief: > Both literal and folded blocks can be used in collections, as values in a sequence or a mapping. yaml: | --- quote: > Mark McGwire's year was crippled by a knee injury. source: espn python: | [ { 'quote': "Mark McGwire's year was crippled by a knee injury.\n", 'source': 'espn' } ] ruby: | { 'quote' => "Mark McGwire's year was crippled by a knee injury.\n", 'source' => 'espn' } --- test: Three trailing newlines in folded blocks brief: > The keep and chomp indicators can also be applied to folded blocks. yaml: | clipped: > This has one newline. same as "clipped" above: "This has one newline.\n" stripped: >- This has no newline. same as "stripped" above: "This has no newline." kept: >+ This has four newlines. 
same as "kept" above: "This has four newlines.\n\n\n\n" ruby: | { 'clipped' => "This has one newline.\n", 'same as "clipped" above' => "This has one newline.\n", 'stripped' => 'This has no newline.', 'same as "stripped" above' => 'This has no newline.', 'kept' => "This has four newlines.\n\n\n\n", 'same as "kept" above' => "This has four newlines.\n\n\n\n" } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/yts/YtsMapInSeq.yml0000644000000000000000000000000011672453175025350 0ustar rootrootruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/yts/YtsRubyTests.yml0000644000000000000000000001163411672453175025656 0ustar rootroot--- %YAML:1.0 test: Symbols brief: > Ruby Symbols can be simply serialized using the !ruby/symbol transfer method, or the abbreviated !ruby/sym. yaml: | simple symbol: !ruby/symbol Simple shortcut syntax: !ruby/sym Simple symbols in seqs: - !ruby/symbol ValOne - !ruby/symbol ValTwo - !ruby/symbol ValThree symbols in maps: - !ruby/symbol MapKey: !ruby/symbol MapValue ruby: | { 'simple symbol' => :Simple, 'shortcut syntax' => :Simple, 'symbols in seqs' => [ :ValOne, :ValTwo, :ValThree ], 'symbols in maps' => [ { :MapKey => :MapValue } ] } --- test: Ranges brief: > Ranges are serialized with the !ruby/range type family. yaml: | normal range: !ruby/range 10..20 exclusive range: !ruby/range 11...20 negative range: !ruby/range -1..-5 ? !ruby/range 0..40 : range as a map key ruby: | { 'normal range' => (10..20), 'exclusive range' => (11...20), 'negative range' => (-1..-5), (0..40) => 'range as a map key' } --- test: Regexps brief: > Regexps may be serialized to YAML, both its syntax and any modifiers. yaml: | case-insensitive: !ruby/regexp "/George McFly/i" complex: !ruby/regexp "/\\A\"((?:[^\"]|\\\")+)\"/" simple: !ruby/regexp '/a.b/' ruby: | { 'simple' => /a.b/, 'complex' => /\A"((?:[^"]|\")+)"/, 'case-insensitive' => /George McFly/i } --- test: Struct class brief: > The Ruby Struct class is registered as a YAML builtin type through Ruby, so it can safely be serialized. To use it, first make sure you define your Struct with Struct::new. Then, you are able to serialize with Struct#to_yaml and unserialize from a YAML stream. yaml: | --- !ruby/struct:BookStruct author: Yukihiro Matsumoto title: Ruby in a Nutshell year: 2002 isbn: 0-596-00214-9 ruby-setup: | book_struct = Struct::new( "BookStruct", :author, :title, :year, :isbn ) ruby: | book_struct.new( "Yukihiro Matsumoto", "Ruby in a Nutshell", 2002, "0-596-00214-9" ) --- test: Nested Structs brief: > As with other YAML builtins, you may nest the Struct inside of other Structs or other data types. yaml: | - !ruby/struct:FoodStruct name: Nachos ingredients: - Mission Chips - !ruby/struct:FoodStruct name: Tostitos Nacho Cheese ingredients: - Milk and Enzymes - Jack Cheese - Some Volatile Chemicals taste: Angelic - Sour Cream taste: Zesty - !ruby/struct:FoodStruct name: Banana Cream Pie ingredients: - Bananas - Creamy Stuff - And Such taste: Puffy ruby-setup: | food_struct = Struct::new( "FoodStruct", :name, :ingredients, :taste ) ruby: | [ food_struct.new( 'Nachos', [ 'Mission Chips', food_struct.new( 'Tostitos Nacho Cheese', [ 'Milk and Enzymes', 'Jack Cheese', 'Some Volatile Chemicals' ], 'Angelic' ), 'Sour Cream' ], 'Zesty' ), food_struct.new( 'Banana Cream Pie', [ 'Bananas', 'Creamy Stuff', 'And Such' ], 'Puffy' ) ] --- test: Objects brief: > YAML has generic support for serializing objects from any class available in Ruby. If using the generic object serialization, no extra code is needed. 
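# (A minimal sketch of the generic object mechanism, separate from the
#  Zoolander test below; the Point class is purely illustrative and the
#  exact dump layout depends on the YAML engine in use.)
#
#   require 'yaml'
#   class Point; attr_accessor :x, :y; end
#   pt = Point.new; pt.x = 1; pt.y = 2
#   puts pt.to_yaml   # => something like "--- !ruby/object:Point\nx: 1\ny: 2\n"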
yaml: | --- !ruby/object:Zoolander name: Derek look: Blue Steel ruby-setup: | class Zoolander attr_accessor :name, :look def initialize( look ) @name = "Derek" @look = look end def ==( z ) self.name == z.name and self.look == z.look end end ruby: | Zoolander.new( "Blue Steel" ) --- test: Extending Kernel::Array brief: > When extending the Array class, your instances of such a class will dump as YAML sequences, tagged with a class name. yaml: | --- !ruby/array:MyArray - jacket - sweater - windbreaker ruby-setup: | class MyArray < ::Array; end ruby: | outerwear = MyArray.new outerwear << 'jacket' outerwear << 'sweater' outerwear << 'windbreaker' outerwear --- test: Extending Kernel::Hash brief: > When extending the Hash class, your instances of such a class will dump as YAML maps, tagged with a class name. yaml: | --- !ruby/hash:MyHash Black Francis: Frank Black Kim Deal: Breeders Joey Santiago: Martinis ruby-setup: | # Note that the @me attribute isn't dumped # because the default to_yaml is trained # to dump as a regular Hash. class MyHash < ::Hash attr_accessor :me def initialize @me = "Why" end end ruby: | pixies = MyHash.new pixies['Black Francis'] = 'Frank Black' pixies['Kim Deal'] = 'Breeders' pixies['Joey Santiago'] = 'Martinis' pixies ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/yts/YtsNullsAndEmpties.yml0000644000000000000000000000164011672453175026755 0ustar rootroot--- %YAML:1.0 test: Empty Sequence brief: > You can represent the empty sequence with an empty inline sequence. yaml: | empty: [] python: | [ { 'empty': [] } ] ruby: | { 'empty' => [] } --- test: Empty Mapping brief: > You can represent the empty mapping with an empty inline mapping. yaml: | empty: {} python: | [ { 'empty': {} } ] ruby: | { 'empty' => {} } --- test: Empty Sequence as Entire Document yaml: | --- [] python: | [ [] ] ruby: | [] --- test: Empty Mapping as Entire Document yaml: | --- {} python: | [ {} ] ruby: | {} --- test: Null as Document yaml: | --- ~ python: | [ None ] ruby: | nil --- test: Empty String brief: > You can represent an empty string with a pair of quotes. yaml: | --- '' python: | [ '' ] ruby: | '' ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/yts/YtsTypeTransfers.yml0000644000000000000000000001612711672453175026525 0ustar rootroot--- %YAML:1.0 test: Strings brief: > Any group of characters beginning with an alphabetic or numeric character is a string, unless it belongs to one of the groups below (such as an Integer or Time). yaml: | --- String ruby: | 'String' --- test: String characters brief: > A string can contain any alphabetic or numeric character, along with many punctuation characters, including the period, dash, space, quotes, exclamation, and question mark. yaml: | - What's Yaml? - It's for writing data structures in plain text. - And? - And what? That's not good enough for you? - No, I mean, "And what about Yaml?" - Oh, oh yeah. Uh.. Yaml for Ruby. ruby: | [ "What's Yaml?", "It's for writing data structures in plain text.", "And?", "And what? That's not good enough for you?", "No, I mean, \"And what about Yaml?\"", "Oh, oh yeah. Uh.. Yaml for Ruby." ] --- test: Indicators in Strings brief: > Be careful using indicators in strings. In particular, the comma, colon, and pound sign must be used carefully. 
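# (A minimal illustration, not part of the test data: quoting a scalar
#  side-steps the indicator rules entirely.)
#
#   require 'yaml'
#   YAML.load('quoted: "a: b, c #d"')   # => { 'quoted' => 'a: b, c #d' }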
yaml: | the colon followed by space is an indicator: but is a string:right here same for the pound sign: here we have it#in a string the comma can, honestly, be used in most cases: [ but not in, inline collections ] ruby: | { 'the colon followed by space is an indicator' => 'but is a string:right here', 'same for the pound sign' => 'here we have it#in a string', 'the comma can, honestly, be used in most cases' => [ 'but not in', 'inline collections' ] } --- test: Forcing Strings brief: > Any YAML type can be forced into a string using the explicit !str method. yaml: | date string: !str 2001-08-01 number string: !str 192 ruby: | { 'date string' => '2001-08-01', 'number string' => '192' } --- test: Single-quoted Strings brief: > You can also enclose your strings within single quotes, which allows use of slashes, colons, and other indicators freely. Inside single quotes, you can represent a single quote in your string by using two single quotes next to each other. yaml: | all my favorite symbols: '#:!/%.)' a few i hate: '&(*' why do i hate them?: 'it''s very hard to explain' ruby: | { 'all my favorite symbols' => '#:!/%.)', 'a few i hate' => '&(*', 'why do i hate them?' => 'it\'s very hard to explain' } --- test: Double-quoted Strings brief: > Enclosing strings in double quotes allows you to use escapings to represent ASCII and Unicode characters. yaml: | i know where i want my line breaks: "one here\nand another here\n" ruby: | { 'i know where i want my line breaks' => "one here\nand another here\n" } --- test: Multi-line Quoted Strings brief: > Both single- and double-quoted strings may be carried on to new lines in your YAML document. They must be indented a step and indentation is interpreted as a single space. yaml: | i want a long string: "so i'm going to let it go on and on to other lines until i end it with a quote." ruby: | { 'i want a long string' => "so i'm going to " + "let it go on and on to other lines " + "until i end it with a quote." } --- test: Plain scalars brief: > Unquoted strings may also span multiple lines, if they are free of YAML space indicators and indented. yaml: | - My little toe is broken in two places; - I'm crazy to have skied this way; - I'm not the craziest he's seen, since there was always the German guy who skied for 3 hours on a broken shin bone (just below the kneecap); - Nevertheless, second place is respectable, and he doesn't recommend going for the record; - He's going to put my foot in plaster for a month; - This would impair my skiing ability somewhat for the duration, as can be imagined. ruby: | [ "My little toe is broken in two places;", "I'm crazy to have skied this way;", "I'm not the craziest he's seen, since there was always " + "the German guy who skied for 3 hours on a broken shin " + "bone (just below the kneecap);", "Nevertheless, second place is respectable, and he doesn't " + "recommend going for the record;", "He's going to put my foot in plaster for a month;", "This would impair my skiing ability somewhat for the duration, " + "as can be imagined." ] --- test: 'Null' brief: > You can use the tilde '~' character for a null value. yaml: | name: Mr. Show hosted by: Bob and David date of next season: ~ ruby: | { 'name' => 'Mr. Show', 'hosted by' => 'Bob and David', 'date of next season' => nil } --- test: Boolean brief: > You can use 'true' and 'false' for boolean values. yaml: | Is Gus a Liar?: true Do I rely on Gus for Sustenance?: false ruby: | { 'Is Gus a Liar?' => true, 'Do I rely on Gus for Sustenance?' 
=> false } --- test: Integers brief: > An integer is a series of numbers, optionally starting with a positive or negative sign. Integers may also contain commas for readability. yaml: | zero: 0 simple: 12 one-thousand: 1,000 negative one-thousand: -1,000 ruby: | { 'zero' => 0, 'simple' => 12, 'one-thousand' => 1000, 'negative one-thousand' => -1000 } python: | [ { 'zero': 0, 'simple': 12, 'one-thousand': 1000, 'negative one-thousand': -1000, } ] --- test: Integers as Map Keys brief: > An integer can be used a dictionary key. yaml: | 1: one 2: two 3: three python: | [ { 1: 'one', 2: 'two', 3: 'three', } ] ruby: | { 1 => 'one', 2 => 'two', 3 => 'three' } --- test: Floats brief: > Floats are represented by numbers with decimals, allowing for scientific notation, as well as positive and negative infinity and "not a number." yaml: | a simple float: 2.00 larger float: 1,000.09 scientific notation: 1.00009e+3 ruby: | { 'a simple float' => 2.0, 'larger float' => 1000.09, 'scientific notation' => 1000.09 } --- test: Time brief: > You can represent timestamps by using ISO8601 format, or a variation which allows spaces between the date, time and time zone. yaml: | iso8601: 2001-12-14t21:59:43.10-05:00 space seperated: 2001-12-14 21:59:43.10 -05:00 ruby: | { 'iso8601' => YAML::mktime( 2001, 12, 14, 21, 59, 43, 0.10, "-05:00" ), 'space seperated' => YAML::mktime( 2001, 12, 14, 21, 59, 43, 0.10, "-05:00" ) } --- test: Date brief: > A date can be represented by its year, month and day in ISO8601 order. yaml: | --- 1976-07-31 ruby: | Date.new( 1976, 7, 31 ) ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/yts/cookbook.rb0000644000000000000000000000665311672453175024627 0ustar rootroot# vim:sw=4:ts=4 # $Id: cookbook.rb,v 1.1 2003/06/05 17:30:03 whythluckystiff Exp $ # # Cookbook Generator for YamlTestingSuite # require 'erb/erbl' require 'yaml' ytsFiles = [ [ 'YtsBasicTests.yml', { 'name' => 'Collections', 'doc' => [] } ], [ 'YtsFlowCollections.yml', { 'name' => 'Inline Collections', 'doc' => [] } ], [ 'YtsTypeTransfers.yml', { 'name' => 'Basic Types', 'doc' => [] } ], [ 'YtsFoldedScalars.yml', { 'name' => 'Blocks', 'doc' => [] } ], [ 'YtsAnchorAlias.yml', { 'name' => 'Aliases and Anchors', 'doc' => [] } ], [ 'YtsDocumentSeparator.yml', { 'name' => 'Documents', 'doc' => [] } ], [ 'YtsRubyTests.yml', { 'name' => 'YAML For Ruby', 'doc' => [] } ] # [ 'YtsSpecificationExamples.yml', { # 'name' => 'Examples from the Specification', # 'doc' => [] # } ] ] ytsFiles.each do |yt| yt[1]['doc'] = YAML::load_stream( File.open( yt[0] ) ) yt[1]['href'] = yt[1]['name'].downcase.gsub( /\s+/, '_' ) end erb = ERbLight.new( <
    <% ytsFiles.each do |yt| %>
  • <%= yt[1]['name'] %>
  • <% if yt[1]['doc'].documents.length > 0 %>
      <% yt[1]['doc'].documents.each do |ydoc| ydoc['href'] = ydoc['test'].downcase.gsub( /\s+/, '_' ) if ydoc['test'] %>
    • <%= ydoc['test'] %>
    • <% end %>
    <% end %> <% end %>
<% ytsFiles.each do |yt| %>


<%= yt[1]['name'] %>

<% if yt[1]['doc'].documents.length > 0 yt[1]['doc'].documents.each do |ydoc| %>

<%= ydoc['test'] %>

Brief

<%= ydoc['brief'] %>

Yaml

 <%= ydoc['test'].gsub( ' ', ' ' ) if ydoc['test'] %>   
<%= ydoc['yaml'] %>

Ruby

 <%= ydoc['test'].gsub( ' ', ' ' ) if ydoc['test'] %>   
<% if ydoc.has_key?( 'ruby-setup' ) %>
<%= ydoc['ruby-setup'] %>
<% end %>
<%= ydoc['ruby'] %>
<% end end %> <% end %> TMPL ) puts erb.result ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/yts/index.yml0000644000000000000000000000030411672453175024311 0ustar rootroot- YtsBasicTests - YtsBlockMapping - YtsFlowCollections - YtsTypeTransfers - YtsFoldedScalars - YtsAnchorAlias - YtsDocumentSeparator - YtsNullsAndEmpties - YtsRubyTests - YtsSpecificationExamples ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/yts/YtsBlockSequence.yml0000644000000000000000000000000011672453175026416 0ustar rootrootruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/README0000644000000000000000000003254111672453175022530 0ustar rootroot# # # .. yaml.rb ..... # . . # . . # . . # ....... v0.60 .. # # # # {o} # ^ # {o} # ^ # # Load this README! # # >> YAML::load( File.open( 'README' ) ) # --- %YAML:1.0 title: YAML.rb version: 0.60 author: [Why the Lucky Stiff, yaml-core@whytheluckystiff.net] websites: [http://www.yaml.org, http://yaml4r.sf.net, http://sf.net/projects/yaml4r/] installation: > YAML.rb depends on Racc, available in the RAA: http://www.ruby-lang.org/en/raa-list.rhtml?name=Racc Once Racc is installed, run the install.rb script in this distribution: ruby install.rb To run the included unit tests: ruby tests/basic.rb To run the new YamlTestingSuite: cd yts ruby yts.rb about: > From the specification: "YAML(tm) (rhymes with 'camel') is a straightforward machine parsable data serialization format designed for human readability and interaction with scripting languages such as Perl and Python. YAML is optimized for data serialization, formatted dumping, configuration files, log files, Internet messaging and filtering. This specification describes the YAML information model and serialization format. Together with the Unicode standard for characters, it provides all the information necessary to understand YAML Version 1.0 and construct computer programs to process it." For Ruby developers, YAML is a natural fit for object serialization and general data storage. Really, it's quite fantastic. Spreads right on your Rubyware like butter on bread! The possible uses for YAML are innumerable. Configuration files, custom internet protocols, documentation, the list goes on and on. Also, with YAML readers popping up for other languages (see YAML.pm and others), you can pass data easily to colleagues in distant lands, swamped in their archaic languages. YAML is a beacon of light, reaching out to them all. ;) If I can-- quickly, of course-- in the Pickaxe book-- my all-time favorite coding book-- Dave Thomas and Andy Hunt say: "When we discovered Ruby, we realized that we'd found what we'd been looking for. More than any other language with which we have worked, Ruby stays out of your way. You can concentrate on solving the problem at hand, instead of struggling with compiler and language issues. That's how it can help you become a better programmer: by giving you the chance to spend your time creating solutions for your users, not for the compiler." HeiL! So true. Ruby's elegance, its readability, its common sense! Such it is with YAML. YAML is completely readable, in fact much of its syntax parallels Ruby's own data structure syntax! Another one from the Pickaxe: "Ruby is easy to learn. Everyday tasks are simple to code and once you've done them, they are easy to maintain and grow. Apparently difficult things often turn out not to have been difficult after all. 
Ruby follows the Principle of Least Surprise--things work the way you would expect them to, with very few special cases or exceptions. And that really does make a difference when you're programming." A quick look at YAML and you can see your data structure immediately. If I compare it to SOAP or XML-RPC, the difference is immense. With XML-RPC, you can see the data structures, but its terribly verbose. More time is spent describing the structure than anything else. Again, the Principle of Least Surprise is wholly present in YAML. Thank God! Well, welcome to YAML.rb. Now let's look at the API and see what we're dealing with! lets show off: - section: Exporting objects to YAML explanation: > Ruby encourages objects to have their own exporting methods. Hence, YAML.rb adds #to_yaml methods for built-in types. The NilClass, FalseClass, TrueClass, Symbol, Range, Numeric, Date, Time, Regexp, String, Array, and Hash all contain the to_yaml method. example: | require 'yaml' h = { 'test' => 12, 'another' => 13 } puts h.to_yaml - section: Loading a single YAML document explanation: > Although you'll often want to store multiple YAML documents in a single file, YAML.rb has a mechanism for loading and storing a single document in a single file. I wanted to offer a simpler API for those who don't care for the multiple document styling and just want to store a single object. example: | require 'yaml' obj = YAML::load( File::open( "/tmp/yaml.store.1" ) ) - section: Loading an object from a string explanation: > Perhaps you get an object from an HTTP post. String objects can be loaded through the same YAML::load used with the File object above. You can also pass StringIO into YAML::load. example: | require 'yaml' obj = YAML::load( < [ 'armless', 'falling', 'birds' ] - section: Replacing PStore with YAML explanation: > PStore is an excellent utility for Ruby developers. Akin to Python's pickle, objects can be serialized to a file. In YAML.rb, the PStore API is replicated identically, as a drop-in replacement for PStore code. example: | require 'yaml' y = YAML::Store.new( "/tmp/yaml.store.1", :Indent => 2, :Separator => '---.pstore' ) y.transaction do y['names'] = ['Crispin', 'Glover'] y['hello'] = {'hi' => 'hello', 'yes' => 'YES!!' } end - section: Exporting multiple documents to YAML explanation: > A single YAML file can store several documents, each opened with a YAML separator ('---'). This can be especially useful for streaming data over a socket or for separating log file entries (as see in the spec -- http://yaml.org/spec/). This release writes all documents out upon calling YAML::Stream#emit, but future releases will allow document writing to stream individually. example: | y = YAML::Stream.new( :Indent => 2, :UseVersion => true ) y.add( {'my_regex' => /hello [Jj][Aa][Mm][Ee][Ss]/, 'my_age' => 90 } ) y.add( {'hi' => 'wow!', 'bye' => 'wow!'} ) y.add( {['Red Socks','Boston'] => ['One', 'Two', 'Three']} ) y.add( [true, false, false] ) puts y.emit - section: Loading multiple documents at once into a YAML::Stream explanation: > In using YAML::Stream to write your objects, you may find that you want a quick way to load several documents at once back into a YAML::Stream, for editing and rewriting. 
example: | require 'yaml' File.open( "/home/why/.personalrc", "rw" ) { |rc| # Load the objects from the file y = YAML::load_stream( rc ) # Make edits to the objects doc2 = y.documents[2] doc2['car'] = '1997 Subaru Outback' y.edit( 2, doc2 ) # Save back out rc.rewind rc.write( y.emit ) rc.close } - section: Loading multiple documents from a YAML stream explanation: | When reading YAML from a socket or a pipe, you should consider using the event-based parser, which will parse documents one at a time. example: | require 'yaml' log = File.open( "/var/log/apache.yaml" ) yp = YAML::load_documents( log ) { |doc| puts "#{doc['at']} #{doc['type']} #{doc['url}" } # Note the YAML document embedded in the YAML document! cheat sheet: | # A YAML reference card --- %YAML:1.0 Collection indicators: '? ' : Key indicator. ': ' : Key / value separator. '- ' : Nested series entry indicator. ', ' : Separate in-line branch entries. '[]' : Surround in-line series branch. '{}' : Surround in-line keyed branch. Scalar indicators: '''' : Surround in-line unescaped scalar ('' escaped '). '"' : Surround in-line escaped scalar (see escape codes below). '|' : Block scalar indicator. '>' : Folded scalar indicator. '-' : Strip chomp modifier ('|-' or '>-'). '+' : Keep chomp modifier ('|+' or '>+'). int : Explicit indentation modifier ('|10' or '>2'). # Modifiers can be combined ('|2-', '>+10'). Alias indicators: '&' : Anchor property. '*' : Alias indicator. Transfer indicators: '!' : Transfer method indicator. '!!' : Transfer method with private type family. '^' : Establish/use global type family prefix. '|' : Separate global type family from format. Document indicators: '%' : Directive indicator. '---' : Document separator. '...' : Document terminator. Misc indicators: ' #' : Throwaway comment indicator. '=' : Default value map key. '<<' : Insert keys from map key. Core types: ### Almost never given explicitly '!map' : [ Hash table, dictionary, mapping ] '!seq' : [ List, array, tuple, vector, sequence ] '!str' : Unicode string Language Independent Scalar types: [ , ~, null ] : Null (no value). [ 1,234, 0x4D2, 02333, 20:34 ] : [ Decimal int, Hexadecimal, Octal, Base60 ] [ 1,230.15, 12.3015e+02, 20:20.15 ] : [ Fixed float, Exponential, Base60 ] [ .inf, -.Inf, .NAN ] : [ Infinity (float), Negative, Not a number ] [ +, true, Yes, ON ] : Boolean true [ -, false, No, OFF ] : Boolean false ? !binary > R0lG...BADS= : Base 64 binary value. Escape codes: Numeric : { "\xXX": 8-bit, "\uXXXX": 16-bit, "\UXXXXXXXX": 32-bit } Protective: { "\\": '\', "\"": '"', "\ ": ' ' } C: { "\a": BEL, "\b": BS, "\f": FF, "\n": LF, "\r": CR, "\t": TAB, "\v": VTAB } Additional: { "\e": ESC, "\0": NUL, "\_": NBSP, "\N": NEL, "\L": LS, "\P": PS } compliance: - feature: YAML Separators supported?: (Yes) notes: Custom YAML separators are allowed and will be used in exporting multiple documents. - feature: YAML directives supported?: (Yes) notes: > Directives are parsed correctly, but are of little use. The TAB directive is completely ignored, as tabs are not supported at this time. - feature: Transfer methods supported?: (Yes) - feature: Private types supported?: (Yes) - feature: URI Escaping supported?: (Yes) - feature: URI Prefixing supported?: (Yes) - feature: Throwaway comments supported?: (Yes) - feature: Anchors supported?: (Yes) - feature: Aliases supported?: (Yes) - feature: Sequences supported?: (Yes) notes: Nested and flow both supported. - feature: Mappings supported?: (Yes) notes: Nested and flow both supported. 
- feature: Key indicators supported?: (Yes) - feature: Explicit indent supported?: (Yes) - feature: Chomping supported?: (Yes) - feature: Literal scalar supported?: (Yes) - feature: Folded scalar supported?: (Yes) - feature: Unquoted scalars supported?: (Yes) notes: No support for spanning unquoted. - feature: Single-quoted scalars supported?: (Yes) - feature: Double-quoted scalars supported?: (Yes) - feature: Escape characters supported?: (Yes) notes: Most should be. - feature: Strings supported?: (Yes) - feature: Null supported?: (Yes) feature: Canonical and english. - feature: Boolean supported?: (Yes) notes: Canonical and english. - feature: Integer supported?: (Yes) notes: Canonical, oct, dec, and hex. - feature: Float supported?: (Yes) notes: Canonical, exp, fix, english - feature: Time supported?: (Yes) notes: Canonical, iso8601, spaced and ymd (as Date). - feature: Binary supported?: (Yes) - feature: Default key supported?: (Yes) acknowledgements: - who: Brian Ingerson why?: | Ingy's YAML.pm was INDISPENSABLE in writing this library. In fact, most of the emitter code follows the YAML.pm code quite closely. I also borrowed from his testing suite. Hopefully in the near future, YAML.rb and YAML.pm will use the same testing suite. Brian is also the creator of YAML. I'd say he's the free software equivalent of an Olympic long jumper. email: ingy@ttul.org - who: Steve Howell why?: | Comrade on the Yaml-core mailing list. He's working on the Python YAML implementation. Very encouraging of this project. I plan on stealing his Ruby code to handle circular data structures. Steve has also contributed to the YAML Testing Suite. email: showell@zipcon.net - who: Clark Evans why?: | Clark showed immediate excitement upon discovery of YAML.rb. And at that point I hadn't really done much yet, but the encouragement sure helped alot. - who: Oren Ben-Kiki why?: | For his work on the spec and the quick reference. All three of these guys have built a well-written specification, paying great attention to details. - who: Yukihiro Matsumoto why?: | Creator of the Ruby language. The most innovative man in software development bar none! - who: qwilk why?: | Long-time friend at desktopian.org, developer of Blackbox for Windows, Robin Hood web server. Excellent chum, persistent BeOS fanatic and leader of a generation of shell folk. ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/samples/0000755000000000000000000000000011672453175023307 5ustar rootrootruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/samples/okayNews-modules.rb0000644000000000000000000000131611672453175027103 0ustar rootroot require 'okay/news' p YAML::load( DATA ) __END__ --- %YAML:1.0 !okay/news title: whytheluckystiff.net link: http://www.whytheluckystiff.net/ description: Home remedies for braindeath. updatePeriod: 01:00:00 items: - !okay/news/item pubTime: 2002-10-23T09:03:40.00-06:00 link: http://whytheluckystiff.net/arch/2002/10/23#1035385420 dc;test: 1 description: > Considering the "discussion"="http://philringnalda.com/archives/002359.php" - !okay/news/item pubTime: 2002-10-22T23:46:57.00-06:00 link: http://whytheluckystiff.net/arch/2002/10/22#1035352017 dc;test: 1 description: > Last night I hung out at this hotel with my relatives, all in town to see ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/samples/okayRpc-server.rb0000644000000000000000000000615311672453175026555 0ustar rootroot# # The demo server from whytheluckystiff.net. 
# require 'okay/rpc' s = Okay::RPC::ModRubyServer.new s.name = "The !okay/rpc Demo Server" s.add_author( 'why the lucky stiff', 'okay-rpc@whytheluckystiff.net', 'http://whytheluckystiff.net/' ) s.about = < y[1] } } # examples.addtwo s.add_handler( "examples.addtwo", %w(int int int), "Add two integers together and return the result." ) { |meth| meth.params[0] + meth.params[1] } # examples.addtwofloat s.add_handler( "examples.addtwofloat", %w(float float float), "Add two floats together and return the result." ) { |meth| meth.params[0] + meth.params[1] } # examples.stringecho s.add_handler( "examples.stringecho", %w(str str), "Accepts a string parameter, returns the string." ) { |meth| meth.params[0] } # examples.echo s.add_handler( "examples.echo", %w(str str), "Accepts a string parameter, returns the entire incoming payload." ) { |meth| meth } # examples.base64 s.add_handler( "examples.base64", %w(str str), "Accepts a base64 parameter and returns it decoded as a string." ) { |meth| [meth.params[0]].pack( "m" ) } # examples.invertBooleans s.add_handler( "examples.invertBooleans", %w(seq seq), "Accepts an array of booleans, and returns them inverted." ) { |meth| meth.params[0].collect{ |x| !x } } s.serve ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/samples/okayRpc-client.rb0000644000000000000000000000235511672453175026525 0ustar rootroot# # Simple client to read metadata from whytheluckystiff.net's # demo !okay/rpc server. # require 'okay/rpc' host = 'whytheluckystiff.net' c = Okay::RPC::Client.new( host ) # Wrapping strings for display class String def wordwrap( len ) gsub( /\n/, "\n\n" ).gsub( /(.{1,#{len}})(\s+|$)/, "\\1\n" ) end end puts "** Sample Client for !okay/rpc using client version #{Okay::RPC::VERSION} **" puts "** Using !okay/rpc server at #{host} **" puts "-- system.about() --" puts about = c.call( 'system.about' ) if about.is_a? Okay::RPC::Fault puts " Fault: " + about.inspect exit end puts " Server: #{ about['name'] }" puts " Version: #{ about['version'] }" puts puts about['about'].wordwrap( 30 ).gsub!( /^/, ' ' ) puts puts "-- system.listMethods() --" puts methods = c.call( 'system.listMethods' ) puts "** #{methods.length} methods available on server **" puts "** Requesting method signatures and docs **" methods.each { |m| c.qcall( 'system.methodSignature', m ) c.qcall( 'system.methodHelp', m ) } methodSigs = c.qrun.documents methods.each { |m| sig = methodSigs.shift help = methodSigs.shift puts puts "-- !#{sig.shift} #{m}(#{sig.collect { |p| '!' + p }.join( ', ' )}) --" puts help.wordwrap( 30 ).gsub( /^/, ' ' ) } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/samples/okayNews-sample.rb0000644000000000000000000003640311672453175026721 0ustar rootrootrequire 'okay/news' # Wrapping strings for display class String def wordwrap( len ) gsub( /\n\s*/, "\n\n" ).gsub( /.{#{len},}?\s+/, "\\0\n" ) end end news = YAML::load( DATA ) news.items.each { |item| puts "-- #{ news.title } @ #{ item.pubTime } --" puts item.description.wordwrap( 70 ).gsub!( /^/, ' ' ) puts } __END__ --- %YAML:1.0 !okay/news title: whytheluckystiff.net link: http://www.whytheluckystiff.net/ description: Home remedies for braindeath. updatePeriod: 00:60.00 items: - !okay/news/item pubTime: 2002-10-23T09:03:40.00-06:00 link: http://whytheluckystiff.net/arch/2002/10/23#1035385420 description: > Considering the "discussion"="http://philringnalda.com/archives/002359.php" around the Web about saving RSS bandwidth, I checked the size of my RSS feed: 33k. And my YAML feed: 15k. 
A big part of that is my content included twice for the "content:encoded"="http://web.resource.org/rss/1.0/modules/content/" tag. Which brings up another valid point in favor of YAML feeds. **YAML doesn't have entity-encoding issues.** XML users: can you imagine? - !okay/news/item pubTime: 2002-10-22T23:46:57.00-06:00 link: http://whytheluckystiff.net/arch/2002/10/22#1035352017 description: > Last night I hung out at this hotel with my relatives, all in town to see "Dot"="http://www.whytheluckystiff.net/arch/2002/10/20#1035131109". I haven't wanted to post here on my site all the events concerning her death, even though I "abandoned"="http://www.whytheluckystiff.net/arch/2002/10/07#1034018848" my paper journal to write on this site. I think I need to flag some entries as private. Because it's too wierd having a site where one minute I'm jabbering about YAML ideas and the next I'm going into the details of ovarian cancer or the rifts in my family life. Sure, I want both documented. But I think I can tell what entries are meant for you and what is meant for me. I'm going to write anyway, though. Under her closed eyes, she went away on Sunday night at 8:12 PM. She did go naturally, as we all wished. You know, I've never had a life like I have right now. Feeling her with me now. Believing that she is somewhere now. Why do I insist on thinking that? They all think she's been ushered off a plane and onto some tropical beach, a part of some Carnival cruise that you get for free after you die. Run by the Carnival people who died and their heaven is continuing their employment. (Why am I telling heaven jokes?) I feel love, so I think about her. Her death was a sunset. As the sun is buried, it still casts the colors upon the world for a while longer. I see those colors in my life now. It's either her literal dispersement into the air or she is simply so joyful now that I can feel her happiness from my vantage point. Can I just tell you: it's so incredible to know someone who had no agenda, who never got offended, who wielded such power but never used it against anyone? So, the hotel. We went downstairs and lounged in the jacuzzi. Talked about work, baseball, Dot and Ray. We did some dives into the pool. One guy walked in with his pecs and abs with his chick and messed around in the pool. Dancing around with each other and playing naughty, splashy pool games with each other. Suddenly they were in the jacuzzi with all of us. It was whack. It was _The Bachelor_. I think I even said, "Hey, are you *The Bachelor*?" He went and told on us for having too many people in the pool and *especially* in the jacuzzi. My aunt and uncle had to move hotels. And that's when I started to question the beauty of life again. - !okay/news/item pubTime: 2002-10-21T15:34:31.00-06:00 link: http://whytheluckystiff.net/arch/2002/10/21#1035236071 description: > Today I've decided to rewrite the parser backend for yaml.rb. The new parser will be called **Pill** and will also become the secret behind "reStructuredText"="http://docutils.sf.net/rst.html" for Ruby. The reST team has put some excellent effort into documenting and producing code for their project. YAML and reST share so much in common that I thought I could save a lot of time by abstracting the parser so that it can handle both. To give you an idea of what I mean, just look at the following Ruby code. 
To create a parser, you merely extend the Pill class and define the meaning of your tokens: = class YamlParser < Pill = # Define YAML's literal block = def_token :block, :literal, = [ '|' ], [ :entry ] = # Define mapping indicator = def_token :indicator, :mapping, = [ ':', :space ] = end This is prototype code. Hopefully it gives the picture though. I define the specific syntax symbols and the parser sends them to event handlers which can construct native datatypes (YAML) or markup (reST). - !okay/news/item pubTime: 2002-10-21T13:25:25.00-06:00 link: http://whytheluckystiff.net/arch/2002/10/21#1035228325 description: > Okay, so I just found an "FAQ"="http://otakuworld.com/games/saturn/vs_moves.htm" for **Vampire Saviour: World of Darkness**, the game I "raved about"="http://advogato.org/article/562.html" on Advogato. The article was likely misplaced. Thinking about it now I can't imagine why I posted it there, but I thought it would be fun. Probably was fun. Keep in mind that this is an old game now and since I don't say that in the article it probably sounds like I'm plugging some new, hot commercial game. No way. This game is dead. And time for a resurrection! It turns out that the reason I can't find any information about **Vampire Saviour: World of Darkness** is because the game isn't really even called that. Yes, that's the name on the arcade machine. I wasn't paying attention to the title screen, though, which probably said **The Lord of Vampire**. At any rate, I didn't see a single vampire in this whole game. Not to mention that it was a bright and colorful game (not a World of Darkness as one might suppose). With all the love that I have in my bosom for this fine Capcom production, I feel that I must rename it for use in my private spheres of influence. In my associations with humanity, I will now refer to this game as **The Organism of Life-giving Eternity**. So it is. So it is. - !okay/news/item pubTime: 2002-10-20T10:25:09.00-06:00 link: http://whytheluckystiff.net/arch/2002/10/20#1035131109 description: > My grandmother has slipped into a coma and will likely close her life in the next few days. We've actually had her much longer than we were supposed to. It's been a wonderful time to be with her. Her name is Dot and I think of her often. She's one of those people in my family tree that I really hope a lot of genes seep down from. She spent her whole life fully devoted to her husband. Their life was golf, Hawaii, gambling, their children. Her husband now has Alzheimer's and she has taught us that Alzheimer's isn't a frightening or sad disease. She helps us see how cute he is and how he still does remember who we are. Maybe he doesn't recognize our faces, but he recognizes something. And so it's up to us to recognize him back. Dot makes me laugh everytime I am with her. One of my favorites was when she looked up at my mom and said, "I was in love with two black men." She can say whatever she wants these days and she does. She thinks her doctors are sexy. She watches lots of baseball, a life-long Dodger's fan, but also general fan of the game. I should stop writing now. I just can't think about it all today. Approaching death has been so hard for her. I just wish her death could be as natural as the rest of her life has been. 
- !okay/news/item pubTime: 2002-10-19T11:02:53.00-06:00 link: http://whytheluckystiff.net/arch/2002/10/19#1035046973 description: > Watching the "RSS 1.0/2.0 logomachy"="http://diveintomark.org/archives/2002/10/18.html#take_up_knitting" is evocative in light of a new "YAML equivalent"="http://whytheluckystiff.net/why.yml". The struggle in the YAML world will be trying to steer people away from interleaving content. (The tags with namespaces you see mixed in with the RSS tags.) The struggle is: what is our answer? See the collection of links at the end of "Mark's"="http://diveintomark.org/" posting, along with the "XSS Draft"="http://www.mplode.com/tima/archives/000126.html". - !okay/news/item pubTime: 2002-10-19T10:46:36.00-06:00 link: http://whytheluckystiff.net/arch/2002/10/19#1035045996 description: > About distribution in the Ruby kingdom. I think "CPAN"="http://search.cpan.org/" has put a bit of undue pressure on us. I personally use CPAN for documentation and for comparing modules before downloading them. I don't use CPAN to install modules. I use the ports collection or (on Linux) the packaging system. Ruby has "RAA"="http://www.ruby-lang.org/en/raa.html". RAA is less than what we need. RAA has some great stuff. You can access it through web services. I just wish it had a more comprehensive search and extensive documentation for each module. And package mirrors. Make sure we don't loose our libraries. Two RAA replacements are in progress, both of which I took a close look at today. # "rpkg"="http://www.allruby.com/rpkg/" is great. It's a Debian-like packaging system for Ruby modules. I don't know exactly how they solve the mirroring issues and there's no accessibility to module docs and definitions online, but it's got some neat ideas. # "rubynet"="http://www.rubynet.org/" is a precocious project to do about anything you could want to do with Ruby packaging. I'm still trying to decide if it's overkill. Again, plenty of ideas to ease installation but no hints of making each package's docs available. I don't think Ruby users have a large problem installing modules. Most modules are quite straightforward. The bigger issue is organization and documentation. Are we the only group who hasn't standardized on a doc format!? - !okay/news/item pubTime: 2002-10-19T01:25:46.00-06:00 link: http://whytheluckystiff.net/arch/2002/10/19#1035012346 description: > I am aghast! YAML.rb 0.44 is now in the FreeBSD ports collection! Sitting right there in /usr/ports/devel/ruby-yaml! It installs perfectly. There are two amazing things about this: # Stanislav Grozev did this without any provocation from me. # I've actually seen one of my software projects through to see some sort of general distribution and acceptance! Seriously, what a kick! The ports collection is like the Hall of Fame for me. I cvsup fresh ports at least weekly and I have for the last several years. I probably shouldn't be making such a big deal out of it, but it's so rewarding to see that someone appreciates this library enough to help step it along for distribution. *Thank you, Stanislav!* - !okay/news/item pubTime: 2002-10-18T13:50:57.00-06:00 link: http://whytheluckystiff.net/arch/2002/10/18#1034970657 description: > I know there's some incredible uses for YAML and documentation, but it's difficult to grasp exactly what that is. Stefan Schmiedl posted an idea to the Yaml-core list last month: > What I had in mind was a "layered" wiki approach, where you would organize YAML-documents to create a page. 
You would edit the "nodes" of the page via usual wiki approach or modify the sequence collecting the docs. At the end of the day you'd have a large collection of YAML-docs and a second collection of "organizers" collating them into pages. The time is soon coming where this will be available here on this site. I'm thinking of something similiar to Wiki. The difference would be that the content would be updated to YAML documents on the server in Yod format. The documents could then be exported to HTML, man pages, CHM, PDF. It would be Wiki, but with a real end toward polished documentation. I want to store organized text on this site in such a way that it can be removed from the site, distributable, and yet very easy to edit. - !okay/news/item pubTime: 2002-10-18T10:01:49.00-06:00 link: http://whytheluckystiff.net/arch/2002/10/18#1034956909 description: > **Coldplay: A Rush of Blood to the Head** Despite what the music press might say, I feel obliged to like whatever music I please to like. And yet, when I read something about an album, I let it easily influence my listening. For a few weeks at least. If some poor review keeps me from hearing an album or I can't make it through the album a few times, then likely I've missed the chance to hear the music as the artist intended it. I don't know why I gave **A Rush of Blood to the Head** a chance really. I remember being unimpressed with Coldplay's first effort. Not to mention that the public's admiration of the band was rather discouraging. But these days I find myself opening back up to **Weezer**, **Ben Kweller**, **Supergrass**. Perhaps I have a diluted catalog, but I really enjoy the songs. What can I say? The song "Clocks" struck me in a peculiar way. The song simply turned my opinion. I actually heard his voice as he intended. I fancied his use of repetition. I went back and listened again. This song had a very warm emotion attached. The disc continued to play and that's when I realized that all of Coldplay's songs have that same emotion spun throughout. Perhaps that's a strike against the band. Their feeling fluctuates between songs, but they don't ever get terrifically angry. Nor do they pity themselves much. If they don't explore all of those avenues of thought, how can they very well be songwriters at all? Coldplay simply shines like a dazzle of light across a country lake. Constantly and slightly beautiful. I'm not the sort of person who can bore of natural beauty. I don't imagine many people can look at a sunset and discard it thanks to the monotony of sunsets each day. It turns out that Coldplay makes beautiful music, which fills a definite void in my collection. ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/samples/yaml-sortHashKeys.rb0000644000000000000000000000604711672453175027232 0ustar rootrootrequire 'yaml' class String def wordwrap( len ) gsub( /\n/, "\n\n" ).gsub( /.{#{len},}?\s+/, "\\0\n" ) end end puts "** Ordering YAML mapping keys **" puts # # Robert Feldt wrote: # # > ...when I use Yaml for ini files I'd like to be able to # > spec an order for map pairs to be serialized in. Obviously Ruby hashes has # > no garantuee for this but are there any plans on supporting this? If not # > I'll whip something up myself; I really wanna make sure the most important # > stuff comes out on top... # # I had three suggestions: # # 1. When using objects, you can specify the ordering of your properties # explicitly by defining a `to_yaml_properties' method. 
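#
#    (A side note on the same point, as a minimal sketch using the
#    Instrument fields defined just below: with the syck-era to_yaml the
#    returned list controls selection as well as order, so an instance
#    variable left out of to_yaml_properties is not emitted at all.)
#
#      def to_yaml_properties
#        [ '@name', '@key' ]    # '@is_woodwind' would be omitted
#      end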
# class Instrument attr_accessor :name, :key, :is_woodwind def initialize( n, k, ww ) @name, @key, @is_woodwind = n, k, ww end def to_yaml_properties [ '@name', '@key', '@is_woodwind' ] end end puts "-- 1. Instrument object --" test1 = Instrument.new( 'Alto Saxophone', 'Eb', true ) puts test1.inspect.wordwrap( 30 ).gsub( /^/, ' ' ) puts puts "** With ordered properties **" puts test1.to_yaml.gsub( /^/, ' ' ) puts # # 2. The same can't be done for Hashes because the key set isn't # predictable. But if the :SortKeys method is called, then # the Hash will be sorted with Hash#sort. You could define a # singleton `sort' method to sort certain Hashes. # test2 = { 'name' => 'Alto Saxophone', 'key' => 'Eb', 'is_woodwind' => true } def test2.sort order = [ 'name', 'key', 'is_woodwind' ] super { |a, b| order.index( a[0] ) <=> order.index( b[0] ) } end puts "-- 2. Instrument hash --" puts test2.inspect.wordwrap( 30 ).gsub( /^/, ' ' ) puts puts "** With ordered keys **" puts test2.to_yaml( :SortKeys => true ).gsub( /^/, ' ' ) puts # # Alternatively, you could define a singleton `to_a' to sort # correctly and skip the :SortKeys option. # # 3. Finally, the YAML spec now defines an !omap type. This type is # an ordered mapping with unique keys. YAML.rb will load this # into a Hash-like class. # test3 = YAML::Omap[ 'name', 'Alto Saxophone', 'key', 'Eb', 'is_woodwind', true ] puts "-- 3. Instrument Omap --" puts test3.inspect.wordwrap( 30 ).gsub( /^/, ' ' ) puts puts "** With ordered keys **" puts test3.to_yaml.gsub( /^/, ' ' ) puts # # Robert's answer was great. He used a mixin to add a singleton `to_a' # method, which allows him to easily sort many hashes. # module SortableToAKeys def key_order=( keys ) @__key_order = keys end def to_a ks, out = self.keys, [] (@__key_order + (ks - @__key_order)).each do |key| out << [key, self[key]] if ks.include?(key) end out end end test4 = { 'name' => 'Alto Saxophone', 'key' => 'Eb', 'is_woodwind' => true } puts "-- 4. Instrument SortableToAKeys --" puts test4.inspect.wordwrap( 30 ).gsub( /^/, ' ' ) puts test4.extend SortableToAKeys test4.key_order = [ 'name', 'key', 'is_woodwind' ] puts "** With ordered keys **" puts test4.to_yaml.gsub( /^/, ' ' ) puts ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/samples/okayNews-validate.rb0000644000000000000000000003657311672453175027241 0ustar rootrootrequire 'okay/news' # Wrapping strings for display class String def wordwrap( len ) gsub( /\n\s*/, "\n\n" ).gsub( /.{#{len},}?\s+/, "\\0\n" ) end end news = YAML::parse( DATA ) if Okay.validate_node( news ) puts "** !okay/news validated **" news = news.transform news.items.each { |item| puts "-- #{ news.title } @ #{ item.pubTime } --" puts item.description.wordwrap( 70 ).gsub!( /^/, ' ' ) puts } end __END__ --- %YAML:1.0 !okay/news title: whytheluckystiff.net link: http://www.whytheluckystiff.net/ description: Home remedies for braindeath. updatePeriod: 00:60.00 items: - !okay/news/item pubTime: 2002-10-23T09:03:40.00-06:00 link: http://whytheluckystiff.net/arch/2002/10/23#1035385420 description: > Considering the "discussion"="http://philringnalda.com/archives/002359.php" around the Web about saving RSS bandwidth, I checked the size of my RSS feed: 33k. And my YAML feed: 15k. A big part of that is my content included twice for the "content:encoded"="http://web.resource.org/rss/1.0/modules/content/" tag. Which brings up another valid point in favor of YAML feeds. **YAML doesn't have entity-encoding issues.** XML users: can you imagine? 
- !okay/news/item pubTime: 2002-10-22T23:46:57.00-06:00 link: http://whytheluckystiff.net/arch/2002/10/22#1035352017 description: > Last night I hung out at this hotel with my relatives, all in town to see "Dot"="http://www.whytheluckystiff.net/arch/2002/10/20#1035131109". I haven't wanted to post here on my site all the events concerning her death, even though I "abandoned"="http://www.whytheluckystiff.net/arch/2002/10/07#1034018848" my paper journal to write on this site. I think I need to flag some entries as private. Because it's too wierd having a site where one minute I'm jabbering about YAML ideas and the next I'm going into the details of ovarian cancer or the rifts in my family life. Sure, I want both documented. But I think I can tell what entries are meant for you and what is meant for me. I'm going to write anyway, though. Under her closed eyes, she went away on Sunday night at 8:12 PM. She did go naturally, as we all wished. You know, I've never had a life like I have right now. Feeling her with me now. Believing that she is somewhere now. Why do I insist on thinking that? They all think she's been ushered off a plane and onto some tropical beach, a part of some Carnival cruise that you get for free after you die. Run by the Carnival people who died and their heaven is continuing their employment. (Why am I telling heaven jokes?) I feel love, so I think about her. Her death was a sunset. As the sun is buried, it still casts the colors upon the world for a while longer. I see those colors in my life now. It's either her literal dispersement into the air or she is simply so joyful now that I can feel her happiness from my vantage point. Can I just tell you: it's so incredible to know someone who had no agenda, who never got offended, who wielded such power but never used it against anyone? So, the hotel. We went downstairs and lounged in the jacuzzi. Talked about work, baseball, Dot and Ray. We did some dives into the pool. One guy walked in with his pecs and abs with his chick and messed around in the pool. Dancing around with each other and playing naughty, splashy pool games with each other. Suddenly they were in the jacuzzi with all of us. It was whack. It was _The Bachelor_. I think I even said, "Hey, are you *The Bachelor*?" He went and told on us for having too many people in the pool and *especially* in the jacuzzi. My aunt and uncle had to move hotels. And that's when I started to question the beauty of life again. - !okay/news/item pubTime: 2002-10-21T15:34:31.00-06:00 link: http://whytheluckystiff.net/arch/2002/10/21#1035236071 description: > Today I've decided to rewrite the parser backend for yaml.rb. The new parser will be called **Pill** and will also become the secret behind "reStructuredText"="http://docutils.sf.net/rst.html" for Ruby. The reST team has put some excellent effort into documenting and producing code for their project. YAML and reST share so much in common that I thought I could save a lot of time by abstracting the parser so that it can handle both. To give you an idea of what I mean, just look at the following Ruby code. To create a parser, you merely extend the Pill class and define the meaning of your tokens: = class YamlParser < Pill = # Define YAML's literal block = def_token :block, :literal, = [ '|' ], [ :entry ] = # Define mapping indicator = def_token :indicator, :mapping, = [ ':', :space ] = end This is prototype code. Hopefully it gives the picture though. 
I define the specific syntax symbols and the parser sends them to event handlers which can construct native datatypes (YAML) or markup (reST). - !okay/news/item pubTime: 2002-10-21T13:25:25.00-06:00 link: http://whytheluckystiff.net/arch/2002/10/21#1035228325 description: > Okay, so I just found an "FAQ"="http://otakuworld.com/games/saturn/vs_moves.htm" for **Vampire Saviour: World of Darkness**, the game I "raved about"="http://advogato.org/article/562.html" on Advogato. The article was likely misplaced. Thinking about it now I can't imagine why I posted it there, but I thought it would be fun. Probably was fun. Keep in mind that this is an old game now and since I don't say that in the article it probably sounds like I'm plugging some new, hot commercial game. No way. This game is dead. And time for a resurrection! It turns out that the reason I can't find any information about **Vampire Saviour: World of Darkness** is because the game isn't really even called that. Yes, that's the name on the arcade machine. I wasn't paying attention to the title screen, though, which probably said **The Lord of Vampire**. At any rate, I didn't see a single vampire in this whole game. Not to mention that it was a bright and colorful game (not a World of Darkness as one might suppose). With all the love that I have in my bosom for this fine Capcom production, I feel that I must rename it for use in my private spheres of influence. In my associations with humanity, I will now refer to this game as **The Organism of Life-giving Eternity**. So it is. So it is. - !okay/news/item pubTime: 2002-10-20T10:25:09.00-06:00 link: http://whytheluckystiff.net/arch/2002/10/20#1035131109 description: > My grandmother has slipped into a coma and will likely close her life in the next few days. We've actually had her much longer than we were supposed to. It's been a wonderful time to be with her. Her name is Dot and I think of her often. She's one of those people in my family tree that I really hope a lot of genes seep down from. She spent her whole life fully devoted to her husband. Their life was golf, Hawaii, gambling, their children. Her husband now has Alzheimer's and she has taught us that Alzheimer's isn't a frightening or sad disease. She helps us see how cute he is and how he still does remember who we are. Maybe he doesn't recognize our faces, but he recognizes something. And so it's up to us to recognize him back. Dot makes me laugh everytime I am with her. One of my favorites was when she looked up at my mom and said, "I was in love with two black men." She can say whatever she wants these days and she does. She thinks her doctors are sexy. She watches lots of baseball, a life-long Dodger's fan, but also general fan of the game. I should stop writing now. I just can't think about it all today. Approaching death has been so hard for her. I just wish her death could be as natural as the rest of her life has been. - !okay/news/item pubTime: 2002-10-19T11:02:53.00-06:00 link: http://whytheluckystiff.net/arch/2002/10/19#1035046973 description: > Watching the "RSS 1.0/2.0 logomachy"="http://diveintomark.org/archives/2002/10/18.html#take_up_knitting" is evocative in light of a new "YAML equivalent"="http://whytheluckystiff.net/why.yml". The struggle in the YAML world will be trying to steer people away from interleaving content. (The tags with namespaces you see mixed in with the RSS tags.) The struggle is: what is our answer? 
See the collection of links at the end of "Mark's"="http://diveintomark.org/" posting, along with the "XSS Draft"="http://www.mplode.com/tima/archives/000126.html". - !okay/news/item pubTime: 2002-10-19T10:46:36.00-06:00 link: http://whytheluckystiff.net/arch/2002/10/19#1035045996 description: > About distribution in the Ruby kingdom. I think "CPAN"="http://search.cpan.org/" has put a bit of undue pressure on us. I personally use CPAN for documentation and for comparing modules before downloading them. I don't use CPAN to install modules. I use the ports collection or (on Linux) the packaging system. Ruby has "RAA"="http://www.ruby-lang.org/en/raa.html". RAA is less than what we need. RAA has some great stuff. You can access it through web services. I just wish it had a more comprehensive search and extensive documentation for each module. And package mirrors. Make sure we don't loose our libraries. Two RAA replacements are in progress, both of which I took a close look at today. # "rpkg"="http://www.allruby.com/rpkg/" is great. It's a Debian-like packaging system for Ruby modules. I don't know exactly how they solve the mirroring issues and there's no accessibility to module docs and definitions online, but it's got some neat ideas. # "rubynet"="http://www.rubynet.org/" is a precocious project to do about anything you could want to do with Ruby packaging. I'm still trying to decide if it's overkill. Again, plenty of ideas to ease installation but no hints of making each package's docs available. I don't think Ruby users have a large problem installing modules. Most modules are quite straightforward. The bigger issue is organization and documentation. Are we the only group who hasn't standardized on a doc format!? - !okay/news/item pubTime: 2002-10-19T01:25:46.00-06:00 link: http://whytheluckystiff.net/arch/2002/10/19#1035012346 description: > I am aghast! YAML.rb 0.44 is now in the FreeBSD ports collection! Sitting right there in /usr/ports/devel/ruby-yaml! It installs perfectly. There are two amazing things about this: # Stanislav Grozev did this without any provocation from me. # I've actually seen one of my software projects through to see some sort of general distribution and acceptance! Seriously, what a kick! The ports collection is like the Hall of Fame for me. I cvsup fresh ports at least weekly and I have for the last several years. I probably shouldn't be making such a big deal out of it, but it's so rewarding to see that someone appreciates this library enough to help step it along for distribution. *Thank you, Stanislav!* - !okay/news/item pubTime: 2002-10-18T13:50:57.00-06:00 link: http://whytheluckystiff.net/arch/2002/10/18#1034970657 description: > I know there's some incredible uses for YAML and documentation, but it's difficult to grasp exactly what that is. Stefan Schmiedl posted an idea to the Yaml-core list last month: > What I had in mind was a "layered" wiki approach, where you would organize YAML-documents to create a page. You would edit the "nodes" of the page via usual wiki approach or modify the sequence collecting the docs. At the end of the day you'd have a large collection of YAML-docs and a second collection of "organizers" collating them into pages. The time is soon coming where this will be available here on this site. I'm thinking of something similiar to Wiki. The difference would be that the content would be updated to YAML documents on the server in Yod format. The documents could then be exported to HTML, man pages, CHM, PDF. 
It would be Wiki, but with a real end toward polished documentation. I want to store organized text on this site in such a way that it can be removed from the site, distributable, and yet very easy to edit. - !okay/news/item pubTime: 2002-10-18T10:01:49.00-06:00 link: http://whytheluckystiff.net/arch/2002/10/18#1034956909 description: > **Coldplay: A Rush of Blood to the Head** Despite what the music press might say, I feel obliged to like whatever music I please to like. And yet, when I read something about an album, I let it easily influence my listening. For a few weeks at least. If some poor review keeps me from hearing an album or I can't make it through the album a few times, then likely I've missed the chance to hear the music as the artist intended it. I don't know why I gave **A Rush of Blood to the Head** a chance really. I remember being unimpressed with Coldplay's first effort. Not to mention that the public's admiration of the band was rather discouraging. But these days I find myself opening back up to **Weezer**, **Ben Kweller**, **Supergrass**. Perhaps I have a diluted catalog, but I really enjoy the songs. What can I say? The song "Clocks" struck me in a peculiar way. The song simply turned my opinion. I actually heard his voice as he intended. I fancied his use of repetition. I went back and listened again. This song had a very warm emotion attached. The disc continued to play and that's when I realized that all of Coldplay's songs have that same emotion spun throughout. Perhaps that's a strike against the band. Their feeling fluctuates between songs, but they don't ever get terrifically angry. Nor do they pity themselves much. If they don't explore all of those avenues of thought, how can they very well be songwriters at all? Coldplay simply shines like a dazzle of light across a country lake. Constantly and slightly beautiful. I'm not the sort of person who can bore of natural beauty. I don't imagine many people can look at a sunset and discard it thanks to the monotony of sunsets each day. It turns out that Coldplay makes beautiful music, which fills a definite void in my collection. 
ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/ext/0000755000000000000000000000000011672453175022443 5ustar rootrootruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/ext/syck/0000755000000000000000000000000011672453175023414 5ustar rootrootruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/ext/syck/rubyext.c0000644000000000000000000017536111672453175025277 0ustar rootroot/* -*- indent-tabs-mode: nil -*- */ /* * rubyext.c * * $Author: why $ * $Date: 2005/05/19 06:07:42 $ * * Copyright (C) 2003-2005 why the lucky stiff */ #include "ruby.h" #include "syck.h" #include #include typedef struct RVALUE { union { #if 0 struct { unsigned long flags; /* always 0 for freed obj */ struct RVALUE *next; } free; #endif struct RBasic basic; struct RObject object; struct RClass klass; /*struct RFloat flonum;*/ /*struct RString string;*/ struct RArray array; /*struct RRegexp regexp;*/ struct RHash hash; /*struct RData data;*/ struct RStruct rstruct; /*struct RBignum bignum;*/ /*struct RFile file;*/ } as; } RVALUE; typedef struct { long hash; char *buffer; long length; long remaining; int printed; } bytestring_t; #define RUBY_DOMAIN "ruby.yaml.org,2002" #ifndef StringValue #define StringValue(v) (v) #endif #ifndef rb_attr_get #define rb_attr_get(o, i) rb_ivar_get(o, i) #endif /* * symbols and constants */ static ID s_new, s_utc, s_at, s_to_f, s_to_i, s_read, s_binmode, s_call, s_cmp, s_transfer, s_update, s_dup, s_haskey, s_match, s_keys, s_unpack, s_tr_bang, s_default_set, s_tag_read_class, s_tag_subclasses, s_resolver, s_push, s_emitter, s_level, s_detect_implicit, s_node_import, s_out, s_input, s_intern, s_transform, s_yaml_new, s_yaml_initialize, s_node_export, s_to_yaml, s_write, s_set_resolver; static ID s_tags, s_domain, s_kind, s_name, s_options, s_type_id, s_type_id_set, s_style, s_style_set, s_value, s_value_set; static VALUE sym_model, sym_generic, sym_input, sym_bytecode; static VALUE sym_scalar, sym_seq, sym_map; static VALUE sym_1quote, sym_2quote, sym_fold, sym_literal, sym_plain, sym_inline; static VALUE cDate, cNode, cMap, cSeq, cScalar, cOut, cParser, cResolver, cPrivateType, cDomainType, cYObject, cBadAlias, cDefaultKey, cMergeKey, cEmitter; static VALUE oDefaultResolver, oGenericResolver; /* * my private collection of numerical oddities. 
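 * (S_inf() and S_nan() build IEEE-754 infinity and NaN at runtime by dividing by zero; presumably this avoids relying on C99's INFINITY/NAN macros.)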
*/ static double S_zero() { return 0.0; } static double S_one() { return 1.0; } static double S_inf() { return S_one() / S_zero(); } static double S_nan() { return S_zero() / S_zero(); } static VALUE syck_node_transform( VALUE ); /* * handler prototypes */ SYMID rb_syck_load_handler _((SyckParser *, SyckNode *)); void rb_syck_err_handler _((SyckParser *, char *)); SyckNode * rb_syck_bad_anchor_handler _((SyckParser *, char *)); void rb_syck_output_handler _((SyckEmitter *, char *, long)); void rb_syck_emitter_handler _((SyckEmitter *, st_data_t)); int syck_parser_assign_io _((SyckParser *, VALUE)); struct parser_xtra { VALUE data; /* Borrowed this idea from marshal.c to fix [ruby-core:8067] problem */ VALUE proc; VALUE resolver; int taint; }; struct emitter_xtra { VALUE oid; VALUE data; VALUE port; }; /* * Convert YAML to bytecode */ VALUE rb_syck_compile(self, port) VALUE self, port; { SYMID oid; int taint; char *ret; VALUE bc; bytestring_t *sav; SyckParser *parser = syck_new_parser(); taint = syck_parser_assign_io(parser, port); syck_parser_handler( parser, syck_yaml2byte_handler ); syck_parser_error_handler( parser, NULL ); syck_parser_implicit_typing( parser, 0 ); syck_parser_taguri_expansion( parser, 0 ); oid = syck_parse( parser ); syck_lookup_sym( parser, oid, (char **)&sav ); ret = S_ALLOC_N( char, strlen( sav->buffer ) + 3 ); ret[0] = '\0'; strcat( ret, "D\n" ); strcat( ret, sav->buffer ); syck_free_parser( parser ); bc = rb_str_new2( ret ); if ( taint ) OBJ_TAINT( bc ); return bc; } /* * read from io. */ long rb_syck_io_str_read( char *buf, SyckIoStr *str, long max_size, long skip ) { long len = 0; ASSERT( str != NULL ); max_size -= skip; if ( max_size <= 0 ) max_size = 0; else { /* * call io#read. */ VALUE src = (VALUE)str->ptr; VALUE n = LONG2NUM(max_size); VALUE str2 = rb_funcall2(src, s_read, 1, &n); if (!NIL_P(str2)) { StringValue(str2); len = RSTRING(str2)->len; memcpy( buf + skip, RSTRING(str2)->ptr, len ); } } len += skip; buf[len] = '\0'; return len; } /* * determine: are we reading from a string or io? * (returns tainted? boolean) */ int syck_parser_assign_io(parser, port) SyckParser *parser; VALUE port; { int taint = Qtrue; VALUE tmp; if (!NIL_P(tmp = rb_check_string_type(port))) { taint = OBJ_TAINTED(port); /* original taintedness */ port = tmp; syck_parser_str( parser, RSTRING(port)->ptr, RSTRING(port)->len, NULL ); } else if (rb_respond_to(port, s_read)) { if (rb_respond_to(port, s_binmode)) { rb_funcall2(port, s_binmode, 0, 0); } syck_parser_str( parser, (char *)port, 0, rb_syck_io_str_read ); } else { rb_raise(rb_eTypeError, "instance of IO needed"); } return taint; } /* * Get value in hash by key, forcing an empty hash if nil. 
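 * (If the key is missing, a fresh hash is created, stored under the key, and returned, so callers always receive a hash they can populate.)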
*/ VALUE syck_get_hash_aref(hsh, key) VALUE hsh, key; { VALUE val = rb_hash_aref( hsh, key ); if ( NIL_P( val ) ) { val = rb_hash_new(); rb_hash_aset(hsh, key, val); } return val; } /* * creating timestamps */ SYMID rb_syck_mktime(str, len) char *str; long len; { VALUE time; char *ptr = str; VALUE year = INT2FIX(0); VALUE mon = INT2FIX(0); VALUE day = INT2FIX(0); VALUE hour = INT2FIX(0); VALUE min = INT2FIX(0); VALUE sec = INT2FIX(0); long usec; /* Year*/ if ( ptr[0] != '\0' && len > 0 ) { year = INT2FIX(strtol(ptr, NULL, 10)); } /* Month*/ ptr += 4; if ( ptr[0] != '\0' && len > ptr - str ) { while ( !ISDIGIT( *ptr ) ) ptr++; mon = INT2FIX(strtol(ptr, NULL, 10)); } /* Day*/ ptr += 2; if ( ptr[0] != '\0' && len > ptr - str ) { while ( !ISDIGIT( *ptr ) ) ptr++; day = INT2FIX(strtol(ptr, NULL, 10)); } /* Hour*/ ptr += 2; if ( ptr[0] != '\0' && len > ptr - str ) { while ( !ISDIGIT( *ptr ) ) ptr++; hour = INT2FIX(strtol(ptr, NULL, 10)); } /* Minute */ ptr += 2; if ( ptr[0] != '\0' && len > ptr - str ) { while ( !ISDIGIT( *ptr ) ) ptr++; min = INT2FIX(strtol(ptr, NULL, 10)); } /* Second */ ptr += 2; if ( ptr[0] != '\0' && len > ptr - str ) { while ( !ISDIGIT( *ptr ) ) ptr++; sec = INT2FIX(strtol(ptr, NULL, 10)); } /* Millisecond */ ptr += 2; if ( len > ptr - str && *ptr == '.' ) { char *padded = syck_strndup( "000000", 6 ); char *end = ptr + 1; while ( isdigit( *end ) ) end++; MEMCPY(padded, ptr + 1, char, end - (ptr + 1)); usec = strtol(padded, NULL, 10); S_FREE(padded); } else { usec = 0; } /* Time Zone*/ while ( len > ptr - str && *ptr != 'Z' && *ptr != '+' && *ptr != '-' && *ptr != '\0' ) ptr++; if ( len > ptr - str && ( *ptr == '-' || *ptr == '+' ) ) { time_t tz_offset = strtol(ptr, NULL, 10) * 3600; time_t tmp; while ( *ptr != ':' && *ptr != '\0' ) ptr++; if ( *ptr == ':' ) { ptr += 1; if ( tz_offset < 0 ) { tz_offset -= strtol(ptr, NULL, 10) * 60; } else { tz_offset += strtol(ptr, NULL, 10) * 60; } } /* Make TZ time*/ time = rb_funcall(rb_cTime, s_utc, 6, year, mon, day, hour, min, sec); tmp = NUM2LONG(rb_funcall(time, s_to_i, 0)) - tz_offset; return rb_funcall(rb_cTime, s_at, 2, LONG2NUM(tmp), LONG2NUM(usec)); } else { /* Make UTC time*/ return rb_funcall(rb_cTime, s_utc, 7, year, mon, day, hour, min, sec, LONG2NUM(usec)); } } /* * handles merging of an array of hashes * (see http://www.yaml.org/type/merge/) */ VALUE syck_merge_i( entry, hsh ) VALUE entry, hsh; { VALUE tmp; if ( !NIL_P(tmp = rb_check_convert_type(entry, T_HASH, "Hash", "to_hash")) ) { entry = tmp; rb_funcall( hsh, s_update, 1, entry ); } return Qnil; } /* * default handler for ruby.yaml.org types */ int yaml_org_handler( n, ref ) SyckNode *n; VALUE *ref; { char *type_id = n->type_id; int transferred = 0; long i = 0; VALUE obj = Qnil; if ( type_id != NULL && strncmp( type_id, "tag:yaml.org,2002:", 18 ) == 0 ) { type_id += 18; } switch (n->kind) { case syck_str_kind: transferred = 1; if ( type_id == NULL ) { obj = rb_str_new( n->data.str->ptr, n->data.str->len ); } else if ( strcmp( type_id, "null" ) == 0 ) { obj = Qnil; } else if ( strcmp( type_id, "binary" ) == 0 ) { VALUE arr; obj = rb_str_new( n->data.str->ptr, n->data.str->len ); rb_funcall( obj, s_tr_bang, 2, rb_str_new2( "\n\t " ), rb_str_new2( "" ) ); arr = rb_funcall( obj, s_unpack, 1, rb_str_new2( "m" ) ); obj = rb_ary_shift( arr ); } else if ( strcmp( type_id, "bool#yes" ) == 0 ) { obj = Qtrue; } else if ( strcmp( type_id, "bool#no" ) == 0 ) { obj = Qfalse; } else if ( strcmp( type_id, "int#hex" ) == 0 ) { syck_str_blow_away_commas( n ); obj = rb_cstr2inum( 
n->data.str->ptr, 16 ); } else if ( strcmp( type_id, "int#oct" ) == 0 ) { syck_str_blow_away_commas( n ); obj = rb_cstr2inum( n->data.str->ptr, 8 ); } else if ( strcmp( type_id, "int#base60" ) == 0 ) { char *ptr, *end; long sixty = 1; long total = 0; syck_str_blow_away_commas( n ); ptr = n->data.str->ptr; end = n->data.str->ptr + n->data.str->len; while ( end > ptr ) { long bnum = 0; char *colon = end - 1; while ( colon >= ptr && *colon != ':' ) { colon--; } if ( *colon == ':' ) *colon = '\0'; bnum = strtol( colon + 1, NULL, 10 ); total += bnum * sixty; sixty *= 60; end = colon; } obj = INT2FIX(total); } else if ( strncmp( type_id, "int", 3 ) == 0 ) { syck_str_blow_away_commas( n ); obj = rb_cstr2inum( n->data.str->ptr, 10 ); } else if ( strcmp( type_id, "float#base60" ) == 0 ) { char *ptr, *end; long sixty = 1; double total = 0.0; syck_str_blow_away_commas( n ); ptr = n->data.str->ptr; end = n->data.str->ptr + n->data.str->len; while ( end > ptr ) { double bnum = 0; char *colon = end - 1; while ( colon >= ptr && *colon != ':' ) { colon--; } if ( *colon == ':' ) *colon = '\0'; bnum = strtod( colon + 1, NULL ); total += bnum * sixty; sixty *= 60; end = colon; } obj = rb_float_new( total ); } else if ( strcmp( type_id, "float#nan" ) == 0 ) { obj = rb_float_new( S_nan() ); } else if ( strcmp( type_id, "float#inf" ) == 0 ) { obj = rb_float_new( S_inf() ); } else if ( strcmp( type_id, "float#neginf" ) == 0 ) { obj = rb_float_new( -S_inf() ); } else if ( strncmp( type_id, "float", 5 ) == 0 ) { double f; syck_str_blow_away_commas( n ); f = strtod( n->data.str->ptr, NULL ); obj = rb_float_new( f ); } else if ( strcmp( type_id, "timestamp#iso8601" ) == 0 ) { obj = rb_syck_mktime( n->data.str->ptr, n->data.str->len ); } else if ( strcmp( type_id, "timestamp#spaced" ) == 0 ) { obj = rb_syck_mktime( n->data.str->ptr, n->data.str->len ); } else if ( strcmp( type_id, "timestamp#ymd" ) == 0 ) { char *ptr = n->data.str->ptr; VALUE year, mon, day; /* Year*/ ptr[4] = '\0'; year = INT2FIX(strtol(ptr, NULL, 10)); /* Month*/ ptr += 4; while ( !ISDIGIT( *ptr ) ) ptr++; mon = INT2FIX(strtol(ptr, NULL, 10)); /* Day*/ ptr += 2; while ( !ISDIGIT( *ptr ) ) ptr++; day = INT2FIX(strtol(ptr, NULL, 10)); if ( !cDate ) { /* * Load Date module */ rb_require( "date" ); cDate = rb_const_get( rb_cObject, rb_intern("Date") ); } obj = rb_funcall( cDate, s_new, 3, year, mon, day ); } else if ( strncmp( type_id, "timestamp", 9 ) == 0 ) { obj = rb_syck_mktime( n->data.str->ptr, n->data.str->len ); } else if ( strncmp( type_id, "merge", 5 ) == 0 ) { obj = rb_funcall( cMergeKey, s_new, 0 ); } else if ( strncmp( type_id, "default", 7 ) == 0 ) { obj = rb_funcall( cDefaultKey, s_new, 0 ); } else if ( n->data.str->style == scalar_plain && n->data.str->len > 1 && strncmp( n->data.str->ptr, ":", 1 ) == 0 ) { obj = rb_funcall( oDefaultResolver, s_transfer, 2, rb_str_new2( "tag:ruby.yaml.org,2002:sym" ), rb_str_new( n->data.str->ptr + 1, n->data.str->len - 1 ) ); } else if ( strcmp( type_id, "str" ) == 0 ) { obj = rb_str_new( n->data.str->ptr, n->data.str->len ); } else { transferred = 0; obj = rb_str_new( n->data.str->ptr, n->data.str->len ); } break; case syck_seq_kind: if ( type_id == NULL || strcmp( type_id, "seq" ) == 0 ) { transferred = 1; } obj = rb_ary_new2( n->data.list->idx ); for ( i = 0; i < n->data.list->idx; i++ ) { rb_ary_store( obj, i, syck_seq_read( n, i ) ); } break; case syck_map_kind: if ( type_id == NULL || strcmp( type_id, "map" ) == 0 ) { transferred = 1; } obj = rb_hash_new(); for ( i = 0; i < n->data.pairs->idx; 
i++ ) { VALUE k = syck_map_read( n, map_key, i ); VALUE v = syck_map_read( n, map_value, i ); int skip_aset = 0; /* * Handle merge keys */ if ( rb_obj_is_kind_of( k, cMergeKey ) ) { VALUE tmp; if ( !NIL_P(tmp = rb_check_convert_type(v, T_HASH, "Hash", "to_hash")) ) { VALUE dup = rb_funcall( tmp, s_dup, 0 ); rb_funcall( dup, s_update, 1, obj ); obj = dup; skip_aset = 1; } else if ( !NIL_P(tmp = rb_check_array_type(v)) ) { VALUE end = rb_ary_pop( tmp ); VALUE tmph = rb_check_convert_type(end, T_HASH, "Hash", "to_hash"); if ( !NIL_P(tmph) ) { VALUE dup = rb_funcall( tmph, s_dup, 0 ); tmp = rb_ary_reverse( tmp ); rb_ary_push( tmp, obj ); rb_iterate( rb_each, tmp, syck_merge_i, dup ); obj = dup; skip_aset = 1; } } } else if ( rb_obj_is_kind_of( k, cDefaultKey ) ) { rb_funcall( obj, s_default_set, 1, v ); skip_aset = 1; } if ( ! skip_aset ) { rb_hash_aset( obj, k, v ); } } break; } *ref = obj; return transferred; } /* * {native mode} node handler * - Converts data into native Ruby types */ SYMID rb_syck_load_handler(p, n) SyckParser *p; SyckNode *n; { VALUE obj = Qnil; struct parser_xtra *bonus = (struct parser_xtra *)p->bonus; VALUE resolver = bonus->resolver; if ( NIL_P( resolver ) ) { resolver = oDefaultResolver; } /* * Create node, */ obj = rb_funcall( resolver, s_node_import, 1, Data_Wrap_Struct( cNode, NULL, NULL, n ) ); /* * ID already set, let's alter the symbol table to accept the new object */ if (n->id > 0 && !NIL_P(obj)) { MEMCPY((void *)n->id, (void *)obj, RVALUE, 1); MEMZERO((void *)obj, RVALUE, 1); obj = n->id; } if ( bonus->taint) OBJ_TAINT( obj ); if ( bonus->proc != 0 ) rb_funcall(bonus->proc, s_call, 1, obj); rb_hash_aset(bonus->data, INT2FIX(RHASH(bonus->data)->tbl->num_entries), obj); return obj; } /* * friendly errors. */ void rb_syck_err_handler(p, msg) SyckParser *p; char *msg; { char *endl = p->cursor; while ( *endl != '\0' && *endl != '\n' ) endl++; endl[0] = '\0'; rb_raise(rb_eArgError, "%s on line %d, col %d: `%s'", msg, p->linect, p->cursor - p->lineptr, p->lineptr); } /* * provide bad anchor object to the parser. */ SyckNode * rb_syck_bad_anchor_handler(p, a) SyckParser *p; char *a; { VALUE anchor_name = rb_str_new2( a ); SyckNode *badanc = syck_new_map( rb_str_new2( "name" ), anchor_name ); badanc->type_id = syck_strndup( "tag:ruby.yaml.org,2002:object:YAML::Syck::BadAlias", 53 ); return badanc; } /* * data loaded based on the model requested. */ void syck_set_model( p, input, model ) VALUE p, input, model; { SyckParser *parser; Data_Get_Struct(p, SyckParser, parser); syck_parser_handler( parser, rb_syck_load_handler ); /* WARN: gonna be obsoleted soon!! */ if ( model == sym_generic ) { rb_funcall( p, s_set_resolver, 1, oGenericResolver ); } syck_parser_implicit_typing( parser, 1 ); syck_parser_taguri_expansion( parser, 1 ); if ( NIL_P( input ) ) { input = rb_ivar_get( p, s_input ); } if ( input == sym_bytecode ) { syck_parser_set_input_type( parser, syck_bytecode_utf8 ); } else { syck_parser_set_input_type( parser, syck_yaml_utf8 ); } syck_parser_error_handler( parser, rb_syck_err_handler ); syck_parser_bad_anchor_handler( parser, rb_syck_bad_anchor_handler ); } /* * mark parser nodes */ static void syck_mark_parser(parser) SyckParser *parser; { struct parser_xtra *bonus; rb_gc_mark(parser->root); rb_gc_mark(parser->root_on_error); if ( parser->bonus != NULL ) { bonus = (struct parser_xtra *)parser->bonus; rb_gc_mark( bonus->data ); rb_gc_mark( bonus->proc ); } } /* * Free the parser and any bonus attachment. 
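 * The parser_xtra bonus block is allocated per call in syck_parser_load/load_documents; it is released here along with the parser itself.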
*/ void rb_syck_free_parser(p) SyckParser *p; { struct parser_xtra *bonus = (struct parser_xtra *)p->bonus; if ( bonus != NULL ) S_FREE( bonus ); syck_free_parser(p); } /* * YAML::Syck::Parser.allocate */ VALUE syck_parser_s_alloc _((VALUE)); VALUE syck_parser_s_alloc(class) VALUE class; { VALUE pobj; SyckParser *parser = syck_new_parser(); pobj = Data_Wrap_Struct( class, syck_mark_parser, rb_syck_free_parser, parser ); syck_parser_set_root_on_error( parser, Qnil ); return pobj; } /* * YAML::Syck::Parser.initialize( resolver, options ) */ static VALUE syck_parser_initialize(argc, argv, self) int argc; VALUE *argv; VALUE self; { VALUE options; if (rb_scan_args(argc, argv, "01", &options) == 0) { options = rb_hash_new(); } else { Check_Type(options, T_HASH); } rb_ivar_set(self, s_options, options); return self; } /* * YAML::Syck::Parser.bufsize = Integer */ static VALUE syck_parser_bufsize_set( self, size ) VALUE self, size; { SyckParser *parser; if ( rb_respond_to( size, s_to_i ) ) { int n = NUM2INT(rb_funcall(size, s_to_i, 0)); Data_Get_Struct(self, SyckParser, parser); parser->bufsize = n; } return self; } /* * YAML::Syck::Parser.bufsize => Integer */ static VALUE syck_parser_bufsize_get( self ) VALUE self; { SyckParser *parser; Data_Get_Struct(self, SyckParser, parser); return INT2FIX( parser->bufsize ); } /* * YAML::Syck::Parser.load( IO or String ) */ VALUE syck_parser_load(argc, argv, self) int argc; VALUE *argv; VALUE self; { VALUE port, proc, model, input; SyckParser *parser; struct parser_xtra *bonus = S_ALLOC_N( struct parser_xtra, 1 ); volatile VALUE hash; /* protect from GC */ rb_scan_args(argc, argv, "11", &port, &proc); input = rb_hash_aref( rb_attr_get( self, s_options ), sym_input ); model = rb_hash_aref( rb_attr_get( self, s_options ), sym_model ); Data_Get_Struct(self, SyckParser, parser); syck_set_model( self, input, model ); bonus->taint = syck_parser_assign_io(parser, port); bonus->data = hash = rb_hash_new(); bonus->resolver = rb_attr_get( self, s_resolver ); if ( NIL_P( proc ) ) bonus->proc = 0; else bonus->proc = proc; parser->bonus = (void *)bonus; return syck_parse( parser ); } /* * YAML::Syck::Parser.load_documents( IO or String ) { |doc| } */ VALUE syck_parser_load_documents(argc, argv, self) int argc; VALUE *argv; VALUE self; { VALUE port, proc, v, input, model; SyckParser *parser; struct parser_xtra *bonus = S_ALLOC_N( struct parser_xtra, 1 ); volatile VALUE hash; rb_scan_args(argc, argv, "1&", &port, &proc); input = rb_hash_aref( rb_attr_get( self, s_options ), sym_input ); model = rb_hash_aref( rb_attr_get( self, s_options ), sym_model ); Data_Get_Struct(self, SyckParser, parser); syck_set_model( self, input, model ); bonus->taint = syck_parser_assign_io(parser, port); bonus->resolver = rb_attr_get( self, s_resolver ); bonus->proc = 0; parser->bonus = (void *)bonus; while ( 1 ) { /* Reset hash for tracking nodes */ bonus->data = hash = rb_hash_new(); /* Parse a document */ v = syck_parse( parser ); if ( parser->eof == 1 ) { break; } /* Pass document to block */ rb_funcall( proc, s_call, 1, v ); } return Qnil; } /* * YAML::Syck::Parser#set_resolver */ VALUE syck_parser_set_resolver( self, resolver ) VALUE self, resolver; { rb_ivar_set( self, s_resolver, resolver ); return self; } /* * YAML::Syck::Resolver.initialize */ static VALUE syck_resolver_initialize( self ) VALUE self; { VALUE tags = rb_hash_new(); rb_ivar_set(self, s_tags, rb_hash_new()); return self; } /* * YAML::Syck::Resolver#add_type */ VALUE syck_resolver_add_type( self, taguri, cls ) VALUE self, 
taguri, cls; { VALUE tags = rb_attr_get(self, s_tags); rb_hash_aset( tags, taguri, cls ); return Qnil; } /* * YAML::Syck::Resolver#use_types_at */ VALUE syck_resolver_use_types_at( self, hsh ) VALUE self, hsh; { rb_ivar_set( self, s_tags, hsh ); return Qnil; } /* * YAML::Syck::Resolver#detect_implicit */ VALUE syck_resolver_detect_implicit( self, val ) VALUE self, val; { char *type_id; return rb_str_new2( "" ); } /* * YAML::Syck::Resolver#node_import */ VALUE syck_resolver_node_import( self, node ) VALUE self, node; { SyckNode *n; VALUE obj; int i = 0; Data_Get_Struct(node, SyckNode, n); switch (n->kind) { case syck_str_kind: obj = rb_str_new( n->data.str->ptr, n->data.str->len ); break; case syck_seq_kind: obj = rb_ary_new2( n->data.list->idx ); for ( i = 0; i < n->data.list->idx; i++ ) { rb_ary_store( obj, i, syck_seq_read( n, i ) ); } break; case syck_map_kind: obj = rb_hash_new(); for ( i = 0; i < n->data.pairs->idx; i++ ) { VALUE k = syck_map_read( n, map_key, i ); VALUE v = syck_map_read( n, map_value, i ); int skip_aset = 0; /* * Handle merge keys */ if ( rb_obj_is_kind_of( k, cMergeKey ) ) { if ( rb_obj_is_kind_of( v, rb_cHash ) ) { VALUE dup = rb_funcall( v, s_dup, 0 ); rb_funcall( dup, s_update, 1, obj ); obj = dup; skip_aset = 1; } else if ( rb_obj_is_kind_of( v, rb_cArray ) ) { VALUE end = rb_ary_pop( v ); if ( rb_obj_is_kind_of( end, rb_cHash ) ) { VALUE dup = rb_funcall( end, s_dup, 0 ); v = rb_ary_reverse( v ); rb_ary_push( v, obj ); rb_iterate( rb_each, v, syck_merge_i, dup ); obj = dup; skip_aset = 1; } } } else if ( rb_obj_is_kind_of( k, cDefaultKey ) ) { rb_funcall( obj, s_default_set, 1, v ); skip_aset = 1; } if ( ! skip_aset ) { rb_hash_aset( obj, k, v ); } } break; } if ( n->type_id != NULL ) { obj = rb_funcall( self, s_transfer, 2, rb_str_new2( n->type_id ), obj ); } return obj; } /* * Set instance variables */ VALUE syck_set_ivars( vars, obj ) VALUE vars, obj; { VALUE ivname = rb_ary_entry( vars, 0 ); char *ivn; StringValue( ivname ); ivn = S_ALLOC_N( char, RSTRING(ivname)->len + 2 ); ivn[0] = '@'; ivn[1] = '\0'; strncat( ivn, RSTRING(ivname)->ptr, RSTRING(ivname)->len ); rb_iv_set( obj, ivn, rb_ary_entry( vars, 1 ) ); S_FREE( ivn ); return Qnil; } /* * YAML::Syck::Resolver#const_find */ VALUE syck_const_find( const_name ) VALUE const_name; { VALUE tclass = rb_cObject; VALUE tparts = rb_str_split( const_name, "::" ); int i = 0; for ( i = 0; i < RARRAY(tparts)->len; i++ ) { VALUE tpart = rb_to_id( rb_ary_entry( tparts, i ) ); if ( !rb_const_defined( tclass, tpart ) ) return Qnil; tclass = rb_const_get( tclass, tpart ); } return tclass; } /* * YAML::Syck::Resolver#transfer */ VALUE syck_resolver_transfer( self, type, val ) VALUE self, type, val; { if (NIL_P(type) || RSTRING(StringValue(type))->len == 0) { type = rb_funcall( self, s_detect_implicit, 1, val ); } if ( ! 
(NIL_P(type) || RSTRING(StringValue(type))->len == 0) ) { VALUE str_xprivate = rb_str_new2( "x-private" ); VALUE colon = rb_str_new2( ":" ); VALUE tags = rb_attr_get(self, s_tags); VALUE target_class = rb_hash_aref( tags, type ); VALUE subclass = target_class; VALUE obj = Qnil; /* * Should no tag match exactly, check for subclass format */ if ( NIL_P( target_class ) ) { VALUE subclass_parts = rb_ary_new(); VALUE parts = rb_str_split( type, ":" ); while ( RARRAY(parts)->len > 1 ) { VALUE partial; rb_ary_unshift( subclass_parts, rb_ary_pop( parts ) ); partial = rb_ary_join( parts, colon ); target_class = rb_hash_aref( tags, partial ); if ( NIL_P( target_class ) ) { rb_str_append( partial, colon ); target_class = rb_hash_aref( tags, partial ); } /* * Possible subclass found, see if it supports subclassing */ if ( ! NIL_P( target_class ) ) { subclass = target_class; if ( RARRAY(subclass_parts)->len > 0 && rb_respond_to( target_class, s_tag_subclasses ) && RTEST( rb_funcall( target_class, s_tag_subclasses, 0 ) ) ) { VALUE subclass_v; subclass = rb_ary_join( subclass_parts, colon ); subclass = rb_funcall( target_class, s_tag_read_class, 1, subclass ); subclass_v = syck_const_find( subclass ); if ( subclass_v != Qnil ) { subclass = subclass_v; } else if ( rb_cObject == target_class && subclass_v == Qnil ) { // StringValue(subclass); // printf( "No class: %s\n", RSTRING(subclass)->ptr ); target_class = cYObject; type = subclass; subclass = cYObject; } } break; } } } /* rb_raise(rb_eTypeError, "invalid typing scheme: %s given", * scheme); */ if ( rb_respond_to( target_class, s_call ) ) { obj = rb_funcall( target_class, s_call, 2, type, val ); } else { if ( rb_respond_to( target_class, s_yaml_new ) ) { obj = rb_funcall( target_class, s_yaml_new, 3, subclass, type, val ); } else if ( !NIL_P( target_class ) ) { obj = rb_obj_alloc( subclass ); if ( rb_respond_to( obj, s_yaml_initialize ) ) { rb_funcall( obj, s_yaml_initialize, 2, type, val ); } else if ( !NIL_P( obj ) && rb_obj_is_instance_of( val, rb_cHash ) ) { rb_iterate( rb_each, val, syck_set_ivars, obj ); } } else { VALUE parts = rb_str_split( type, ":" ); VALUE scheme = rb_ary_shift( parts ); if ( rb_str_cmp( scheme, str_xprivate ) == 0 ) { VALUE name = rb_ary_join( parts, colon ); obj = rb_funcall( cPrivateType, s_new, 2, name, val ); } else { VALUE domain = rb_ary_shift( parts ); VALUE name = rb_ary_join( parts, colon ); obj = rb_funcall( cDomainType, s_new, 3, domain, name, val ); } } } val = obj; } return val; } /* * YAML::Syck::Resolver#tagurize */ VALUE syck_resolver_tagurize( self, val ) VALUE self, val; { VALUE tmp = rb_check_string_type(val); if ( !NIL_P(tmp) ) { char *taguri; val = tmp; taguri = syck_type_id_to_uri( RSTRING(val)->ptr ); return rb_str_new2( taguri ); } return val; } /* * YAML::Syck::DefaultResolver#detect_implicit */ VALUE syck_defaultresolver_detect_implicit( self, val ) VALUE self, val; { char *type_id; VALUE tmp = rb_check_string_type(val); if ( !NIL_P(tmp) ) { val = tmp; type_id = syck_match_implicit( RSTRING(val)->ptr, RSTRING(val)->len ); return rb_str_new2( type_id ); } return rb_str_new2( "" ); } /* * YAML::Syck::DefaultResolver#node_import */ VALUE syck_defaultresolver_node_import( self, node ) VALUE self, node; { SyckNode *n; VALUE obj; Data_Get_Struct( node, SyckNode, n ); if ( !yaml_org_handler( n, &obj ) ) { obj = rb_funcall( self, s_transfer, 2, rb_str_new2( n->type_id ), obj ); } return obj; } /* * YAML::Syck::GenericResolver#node_import */ VALUE syck_genericresolver_node_import( self, node ) VALUE self, 
node; { SyckNode *n; int i = 0; VALUE t = Qnil, obj = Qnil, v = Qnil, style = Qnil; Data_Get_Struct(node, SyckNode, n); if ( n->type_id != NULL ) { t = rb_str_new2(n->type_id); } switch (n->kind) { case syck_str_kind: { v = rb_str_new( n->data.str->ptr, n->data.str->len ); if ( n->data.str->style == scalar_1quote ) { style = sym_1quote; } else if ( n->data.str->style == scalar_2quote ) { style = sym_2quote; } else if ( n->data.str->style == scalar_fold ) { style = sym_fold; } else if ( n->data.str->style == scalar_literal ) { style = sym_literal; } else if ( n->data.str->style == scalar_plain ) { style = sym_plain; } obj = rb_funcall( cScalar, s_new, 3, t, v, style ); } break; case syck_seq_kind: rb_iv_set(obj, "@kind", sym_seq); v = rb_ary_new2( syck_seq_count( n ) ); for ( i = 0; i < syck_seq_count( n ); i++ ) { rb_ary_store( v, i, syck_seq_read( n, i ) ); } if ( n->data.list->style == seq_inline ) { style = sym_inline; } obj = rb_funcall( cSeq, s_new, 3, t, v, style ); break; case syck_map_kind: rb_iv_set(obj, "@kind", sym_map); v = rb_hash_new(); for ( i = 0; i < syck_map_count( n ); i++ ) { rb_hash_aset( v, syck_map_read( n, map_key, i ), syck_map_read( n, map_value, i ) ); } if ( n->data.pairs->style == map_inline ) { style = sym_inline; } obj = rb_funcall( cMap, s_new, 3, t, v, style ); break; } return obj; } /* * YAML::Syck::BadAlias.initialize */ VALUE syck_badalias_initialize( self, val ) VALUE self, val; { rb_iv_set( self, "@name", val ); return self; } /* * YAML::Syck::BadAlias.<=> */ VALUE syck_badalias_cmp( alias1, alias2 ) VALUE alias1, alias2; { VALUE str1 = rb_ivar_get( alias1, s_name ); VALUE str2 = rb_ivar_get( alias2, s_name ); VALUE val = rb_funcall( str1, s_cmp, 1, str2 ); return val; } /* * YAML::DomainType.initialize */ VALUE syck_domaintype_initialize( self, domain, type_id, val ) VALUE self, type_id, val; { rb_iv_set( self, "@domain", domain ); rb_iv_set( self, "@type_id", type_id ); rb_iv_set( self, "@value", val ); return self; } /* * YAML::Object.initialize */ VALUE syck_yobject_initialize( self, klass, ivars ) VALUE self, klass, ivars; { rb_iv_set( self, "@class", klass ); rb_iv_set( self, "@ivars", ivars ); return self; } /* * YAML::PrivateType.initialize */ VALUE syck_privatetype_initialize( self, type_id, val ) VALUE self, type_id, val; { rb_iv_set( self, "@type_id", type_id ); rb_iv_set( self, "@value", val ); return self; } /* * Mark node contents. 
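 * Marks every sequence item and every map key/value pair held by the node so Ruby's GC keeps those objects alive.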
*/ static void syck_node_mark( n ) SyckNode *n; { int i; switch ( n->kind ) { case syck_seq_kind: for ( i = 0; i < n->data.list->idx; i++ ) { rb_gc_mark( syck_seq_read( n, i ) ); } break; case syck_map_kind: for ( i = 0; i < n->data.pairs->idx; i++ ) { rb_gc_mark( syck_map_read( n, map_key, i ) ); rb_gc_mark( syck_map_read( n, map_value, i ) ); } break; } } /* * Don't free Ruby data, Ruby will do that */ void rb_syck_free_node( SyckNode *n ) { switch ( n->kind ) { case syck_str_kind: S_FREE( n->data.str ); n->data.str = NULL; break; case syck_seq_kind: if ( n->data.list != NULL ) { S_FREE( n->data.list->items ); S_FREE( n->data.list ); n->data.list = NULL; } break; case syck_map_kind: if ( n->data.pairs != NULL ) { S_FREE( n->data.pairs->keys ); S_FREE( n->data.pairs->values ); S_FREE( n->data.pairs ); n->data.pairs = NULL; } break; } S_FREE( n ); } /* * YAML::Syck::Scalar.allocate */ VALUE syck_scalar_alloc( class ) VALUE class; { SyckNode *node = syck_alloc_str(); VALUE obj = Data_Wrap_Struct( class, syck_node_mark, rb_syck_free_node, node ); node->id = obj; return obj; } /* * YAML::Syck::Scalar.initialize */ VALUE syck_scalar_initialize( self, type_id, val, style ) VALUE self, type_id, val, style; { rb_iv_set( self, "@kind", sym_scalar ); rb_funcall( self, s_type_id_set, 1, type_id ); rb_funcall( self, s_value_set, 1, val ); rb_funcall( self, s_style_set, 1, style ); return self; } /* * YAML::Syck::Scalar.style= */ VALUE syck_scalar_style_set( self, style ) VALUE self, style; { SyckNode *node; Data_Get_Struct( self, SyckNode, node ); if ( NIL_P( style ) ) { node->data.str->style = scalar_none; } else if ( style == sym_1quote ) { node->data.str->style = scalar_1quote; } else if ( style == sym_2quote ) { node->data.str->style = scalar_2quote; } else if ( style == sym_fold ) { node->data.str->style = scalar_fold; } else if ( style == sym_literal ) { node->data.str->style = scalar_literal; } else if ( style == sym_plain ) { node->data.str->style = scalar_plain; } rb_iv_set( self, "@style", style ); return self; } /* * YAML::Syck::Scalar.value= */ VALUE syck_scalar_value_set( self, val ) VALUE self, val; { SyckNode *node; Data_Get_Struct( self, SyckNode, node ); StringValue( val ); node->data.str->ptr = RSTRING(val)->ptr; node->data.str->len = RSTRING(val)->len; node->data.str->style = scalar_none; rb_iv_set( self, "@value", val ); return val; } /* * YAML::Syck::Seq.allocate */ VALUE syck_seq_alloc( class ) VALUE class; { SyckNode *node; VALUE obj; node = syck_alloc_seq(); obj = Data_Wrap_Struct( class, syck_node_mark, rb_syck_free_node, node ); node->id = obj; return obj; } /* * YAML::Syck::Seq.initialize */ VALUE syck_seq_initialize( self, type_id, val, style ) VALUE self, type_id, val, style; { SyckNode *node; Data_Get_Struct( self, SyckNode, node ); rb_iv_set( self, "@kind", sym_seq ); rb_funcall( self, s_type_id_set, 1, type_id ); rb_funcall( self, s_value_set, 1, val ); rb_funcall( self, s_style_set, 1, style ); return self; } /* * YAML::Syck::Seq.value= */ VALUE syck_seq_value_set( self, val ) VALUE self, val; { SyckNode *node; Data_Get_Struct( self, SyckNode, node ); val = rb_check_array_type( val ); if ( !NIL_P( val ) ) { int i; syck_seq_empty( node ); for ( i = 0; i < RARRAY( val )->len; i++ ) { syck_seq_add( node, rb_ary_entry(val, i) ); } } rb_iv_set( self, "@value", val ); return val; } /* * YAML::Syck::Seq.add */ VALUE syck_seq_add_m( self, val ) VALUE self, val; { SyckNode *node; VALUE emitter = rb_ivar_get( self, s_emitter ); Data_Get_Struct( self, SyckNode, node ); if ( 
rb_respond_to( emitter, s_node_export ) ) { val = rb_funcall( emitter, s_node_export, 1, val ); } syck_seq_add( node, val ); rb_ary_push( rb_ivar_get( self, s_value ), val ); return self; } /* * YAML::Syck::Seq.style= */ VALUE syck_seq_style_set( self, style ) VALUE self, style; { SyckNode *node; Data_Get_Struct( self, SyckNode, node ); if ( style == sym_inline ) { node->data.list->style = seq_inline; } else { node->data.list->style = seq_none; } rb_iv_set( self, "@style", style ); return self; } /* * YAML::Syck::Map.allocate */ VALUE syck_map_alloc( class ) VALUE class; { SyckNode *node; VALUE obj; node = syck_alloc_map(); obj = Data_Wrap_Struct( class, syck_node_mark, rb_syck_free_node, node ); node->id = obj; return obj; } /* * YAML::Syck::Map.initialize */ VALUE syck_map_initialize( self, type_id, val, style ) VALUE self, type_id, val, style; { SyckNode *node; Data_Get_Struct( self, SyckNode, node ); if ( !NIL_P( val ) ) { VALUE hsh = rb_check_convert_type(val, T_HASH, "Hash", "to_hash"); VALUE keys; int i; if ( NIL_P(hsh) ) { rb_raise( rb_eTypeError, "wrong argument type" ); } keys = rb_funcall( hsh, s_keys, 0 ); for ( i = 0; i < RARRAY(keys)->len; i++ ) { VALUE key = rb_ary_entry(keys, i); syck_map_add( node, key, rb_hash_aref(hsh, key) ); } } rb_iv_set( self, "@kind", sym_seq ); rb_funcall( self, s_type_id_set, 1, type_id ); rb_funcall( self, s_value_set, 1, val ); rb_funcall( self, s_style_set, 1, style ); return self; } /* * YAML::Syck::Map.value= */ VALUE syck_map_value_set( self, val ) VALUE self, val; { SyckNode *node; Data_Get_Struct( self, SyckNode, node ); if ( !NIL_P( val ) ) { VALUE hsh = rb_check_convert_type(val, T_HASH, "Hash", "to_hash"); VALUE keys; int i; if ( NIL_P(hsh) ) { rb_raise( rb_eTypeError, "wrong argument type" ); } syck_map_empty( node ); keys = rb_funcall( hsh, s_keys, 0 ); for ( i = 0; i < RARRAY(keys)->len; i++ ) { VALUE key = rb_ary_entry(keys, i); syck_map_add( node, key, rb_hash_aref(hsh, key) ); } } rb_iv_set( self, "@value", val ); return val; } /* * YAML::Syck::Map.add */ VALUE syck_map_add_m( self, key, val ) VALUE self, key, val; { SyckNode *node; VALUE emitter = rb_ivar_get( self, s_emitter ); Data_Get_Struct( self, SyckNode, node ); if ( rb_respond_to( emitter, s_node_export ) ) { key = rb_funcall( emitter, s_node_export, 1, key ); val = rb_funcall( emitter, s_node_export, 1, val ); } syck_map_add( node, key, val ); rb_hash_aset( rb_ivar_get( self, s_value ), key, val ); return self; } /* * YAML::Syck::Map.style= */ VALUE syck_map_style_set( self, style ) VALUE self, style; { SyckNode *node; Data_Get_Struct( self, SyckNode, node ); if ( style == sym_inline ) { node->data.pairs->style = map_inline; } else { node->data.pairs->style = map_none; } rb_iv_set( self, "@style", style ); return self; } /* * Cloning method for all node types */ VALUE syck_node_init_copy( copy, orig ) VALUE copy, orig; { SyckNode *copy_n; SyckNode *orig_n; if ( copy == orig ) return copy; if ( TYPE( orig ) != T_DATA || RDATA( orig )->dfree != ( RUBY_DATA_FUNC )rb_syck_free_node ) { rb_raise( rb_eTypeError, "wrong argument type" ); } Data_Get_Struct( orig, SyckNode, orig_n ); Data_Get_Struct( copy, SyckNode, copy_n ); MEMCPY( copy_n, orig_n, SyckNode, 1 ); return copy; } /* * YAML::Syck::Node#type_id= */ VALUE syck_node_type_id_set( self, type_id ) VALUE self, type_id; { SyckNode *node; Data_Get_Struct( self, SyckNode, node ); if ( node->type_id != NULL ) S_FREE( node->type_id ); if ( NIL_P( type_id ) ) { node->type_id = NULL; } else { node->type_id = StringValuePtr( 
type_id ); } rb_iv_set( self, "@type_id", type_id ); return type_id; } /* * YAML::Syck::Node.transform */ VALUE syck_node_transform( self ) VALUE self; { VALUE t; SyckNode *n; SyckNode *orig_n; Data_Get_Struct(self, SyckNode, orig_n); switch (orig_n->kind) { case syck_map_kind: { int i; n = syck_alloc_map(); for ( i = 0; i < orig_n->data.pairs->idx; i++ ) { syck_map_add( n, rb_funcall( syck_map_read( orig_n, map_key, i ), s_transform, 0 ), rb_funcall( syck_map_read( orig_n, map_value, i ), s_transform, 0 ) ); } } break; case syck_seq_kind: { int i; n = syck_alloc_seq(); for ( i = 0; i < orig_n->data.list->idx; i++ ) { syck_seq_add( n, rb_funcall( syck_seq_read( orig_n, i ), s_transform, 0 ) ); } } break; case syck_str_kind: n = syck_new_str2( orig_n->data.str->ptr, orig_n->data.str->len, orig_n->data.str->style ); break; } if ( orig_n->type_id != NULL ) { n->type_id = syck_strndup( orig_n->type_id, strlen( orig_n->type_id ) ); } if ( orig_n->anchor != NULL ) { n->anchor = syck_strndup( orig_n->anchor, strlen( orig_n->anchor ) ); } t = Data_Wrap_Struct( cNode, NULL, NULL, n ); n->id = t; t = rb_funcall( oDefaultResolver, s_node_import, 1, t ); syck_free_node( n ); return t; } /* * Emitter callback: assembles YAML document events from * Ruby symbols. This is a brilliant way to do it. * No one could possibly object. */ void rb_syck_emitter_handler(e, data) SyckEmitter *e; st_data_t data; { SyckNode *n; Data_Get_Struct((VALUE)data, SyckNode, n); switch (n->kind) { case syck_map_kind: { int i; syck_emit_map( e, n->type_id, n->data.pairs->style ); for ( i = 0; i < n->data.pairs->idx; i++ ) { syck_emit_item( e, syck_map_read( n, map_key, i ) ); syck_emit_item( e, syck_map_read( n, map_value, i ) ); } syck_emit_end( e ); } break; case syck_seq_kind: { int i; syck_emit_seq( e, n->type_id, n->data.list->style ); for ( i = 0; i < n->data.list->idx; i++ ) { syck_emit_item( e, syck_seq_read( n, i ) ); } syck_emit_end( e ); } break; case syck_str_kind: { syck_emit_scalar( e, n->type_id, n->data.str->style, 0, 0, 0, n->data.str->ptr, n->data.str->len ); } break; } } /* * Handle output from the emitter */ void rb_syck_output_handler( emitter, str, len ) SyckEmitter *emitter; char *str; long len; { struct emitter_xtra *bonus = (struct emitter_xtra *)emitter->bonus; VALUE dest = bonus->port; if (TYPE(dest) == T_STRING) { rb_str_cat( dest, str, len ); } else { rb_io_write( dest, rb_str_new( str, len ) ); } } /* * Helper function for marking nodes in the anchor * symbol table. */ void syck_out_mark( emitter, node ) VALUE emitter, node; { SyckEmitter *emitterPtr; struct emitter_xtra *bonus; Data_Get_Struct(emitter, SyckEmitter, emitterPtr); bonus = (struct emitter_xtra *)emitterPtr->bonus; rb_ivar_set( node, s_emitter, emitter ); /* syck_emitter_mark_node( emitterPtr, (st_data_t)node ); */ if ( !NIL_P( bonus->oid ) ) { rb_hash_aset( bonus->data, bonus->oid, node ); } } /* * Mark emitter values. */ static void syck_mark_emitter(emitter) SyckEmitter *emitter; { struct emitter_xtra *bonus; if ( emitter->bonus != NULL ) { bonus = (struct emitter_xtra *)emitter->bonus; rb_gc_mark( bonus->data ); rb_gc_mark( bonus->port ); } } /* * Free the emitter and any bonus attachment. 
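 * The emitter_xtra bonus block (data hash and output port) is allocated in syck_emitter_reset; it is released here along with the emitter.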
*/ void rb_syck_free_emitter(e) SyckEmitter *e; { struct emitter_xtra *bonus = (struct emitter_xtra *)e->bonus; if ( bonus != NULL ) S_FREE( bonus ); syck_free_emitter(e); } /* * YAML::Syck::Emitter.allocate */ VALUE syck_emitter_s_alloc _((VALUE)); VALUE syck_emitter_s_alloc(class) VALUE class; { VALUE pobj; SyckEmitter *emitter = syck_new_emitter(); pobj = Data_Wrap_Struct( class, syck_mark_emitter, rb_syck_free_emitter, emitter ); syck_emitter_handler( emitter, rb_syck_emitter_handler ); syck_output_handler( emitter, rb_syck_output_handler ); rb_ivar_set( pobj, s_out, rb_funcall( cOut, s_new, 1, pobj ) ); return pobj; } /* * YAML::Syck::Emitter.reset( options ) */ VALUE syck_emitter_reset( argc, argv, self ) int argc; VALUE *argv; VALUE self; { VALUE options, tmp; SyckEmitter *emitter; struct emitter_xtra *bonus; volatile VALUE hash; /* protect from GC */ Data_Get_Struct(self, SyckEmitter, emitter); bonus = (struct emitter_xtra *)emitter->bonus; if ( bonus != NULL ) S_FREE( bonus ); bonus = S_ALLOC_N( struct emitter_xtra, 1 ); bonus->port = rb_str_new2( "" ); bonus->data = hash = rb_hash_new(); if (rb_scan_args(argc, argv, "01", &options) == 0) { options = rb_hash_new(); rb_ivar_set(self, s_options, options); } else if ( !NIL_P(tmp = rb_check_string_type(options)) ) { bonus->port = tmp; } else if ( rb_respond_to( options, s_write ) ) { bonus->port = options; } else { Check_Type(options, T_HASH); rb_ivar_set(self, s_options, options); } emitter->bonus = (void *)bonus; rb_ivar_set(self, s_level, INT2FIX(0)); rb_ivar_set(self, s_resolver, Qnil); return self; } /* * YAML::Syck::Emitter.emit( object_id ) { |out| ... } */ VALUE syck_emitter_emit( argc, argv, self ) int argc; VALUE *argv; VALUE self; { VALUE oid, proc; char *anchor_name; SyckEmitter *emitter; struct emitter_xtra *bonus; SYMID symple; int level = FIX2INT(rb_ivar_get(self, s_level)) + 1; rb_ivar_set(self, s_level, INT2FIX(level)); rb_scan_args(argc, argv, "1&", &oid, &proc); Data_Get_Struct(self, SyckEmitter, emitter); bonus = (struct emitter_xtra *)emitter->bonus; /* Calculate anchors, normalize nodes, build a simpler symbol table */ bonus->oid = oid; if ( !NIL_P( oid ) && RTEST( rb_funcall( bonus->data, s_haskey, 1, oid ) ) ) { symple = rb_hash_aref( bonus->data, oid ); } else { symple = rb_funcall( proc, s_call, 1, rb_ivar_get( self, s_out ) ); } syck_emitter_mark_node( emitter, (st_data_t)symple ); /* Second pass, build emitted string */ level -= 1; rb_ivar_set(self, s_level, INT2FIX(level)); if ( level == 0 ) { syck_emit(emitter, (st_data_t)symple); syck_emitter_flush(emitter, 0); return bonus->port; } return symple; } /* * YAML::Syck::Emitter#node_export */ VALUE syck_emitter_node_export( self, node ) VALUE self, node; { return rb_funcall( node, s_to_yaml, 1, self ); } /* * YAML::Syck::Emitter#set_resolver */ VALUE syck_emitter_set_resolver( self, resolver ) VALUE self, resolver; { rb_ivar_set( self, s_resolver, resolver ); return self; } /* * YAML::Syck::Out::initialize */ VALUE syck_out_initialize( self, emitter ) VALUE self, emitter; { rb_ivar_set( self, s_emitter, emitter ); return self; } /* * YAML::Syck::Out::map */ VALUE syck_out_map( argc, argv, self ) int argc; VALUE *argv; VALUE self; { VALUE type_id, style, map; if (rb_scan_args(argc, argv, "11", &type_id, &style) == 1) { style = Qnil; } map = rb_funcall( cMap, s_new, 3, type_id, rb_hash_new(), style ); syck_out_mark( rb_ivar_get( self, s_emitter ), map ); rb_yield( map ); return map; } /* * YAML::Syck::Out::seq */ VALUE syck_out_seq( argc, argv, self ) int 
argc; VALUE *argv; VALUE self; { VALUE type_id, style, seq; if (rb_scan_args(argc, argv, "11", &type_id, &style) == 1) { style = Qnil; } seq = rb_funcall( cSeq, s_new, 3, type_id, rb_ary_new(), style ); syck_out_mark( rb_ivar_get( self, s_emitter ), seq ); rb_yield( seq ); return seq; } /* * YAML::Syck::Out::scalar syck_out_scalar( self, type_id, str, style ) VALUE self, type_id, str, style; */ VALUE syck_out_scalar( argc, argv, self ) int argc; VALUE *argv; VALUE self; { VALUE type_id, str, style, scalar; if (rb_scan_args(argc, argv, "21", &type_id, &str, &style) == 2) { style = Qnil; } scalar = rb_funcall( cScalar, s_new, 3, type_id, str, style ); syck_out_mark( rb_ivar_get( self, s_emitter ), scalar ); return scalar; } /* * Initialize Syck extension */ void Init_syck() { VALUE rb_yaml = rb_define_module( "YAML" ); VALUE rb_syck = rb_define_module_under( rb_yaml, "Syck" ); rb_define_const( rb_syck, "VERSION", rb_str_new2( SYCK_VERSION ) ); rb_define_module_function( rb_syck, "compile", rb_syck_compile, 1 ); /* * Global symbols */ s_new = rb_intern("new"); s_utc = rb_intern("utc"); s_at = rb_intern("at"); s_to_f = rb_intern("to_f"); s_to_i = rb_intern("to_i"); s_read = rb_intern("read"); s_binmode = rb_intern("binmode"); s_transfer = rb_intern("transfer"); s_call = rb_intern("call"); s_cmp = rb_intern("<=>"); s_intern = rb_intern("intern"); s_update = rb_intern("update"); s_detect_implicit = rb_intern("detect_implicit"); s_dup = rb_intern("dup"); s_default_set = rb_intern("default="); s_match = rb_intern("match"); s_push = rb_intern("push"); s_haskey = rb_intern("has_key?"); s_keys = rb_intern("keys"); s_node_import = rb_intern("node_import"); s_tr_bang = rb_intern("tr!"); s_unpack = rb_intern("unpack"); s_write = rb_intern("write"); s_tag_read_class = rb_intern( "tag_read_class" ); s_tag_subclasses = rb_intern( "tag_subclasses?" 
); s_emitter = rb_intern( "emitter" ); s_set_resolver = rb_intern( "set_resolver" ); s_node_export = rb_intern( "node_export" ); s_to_yaml = rb_intern( "to_yaml" ); s_transform = rb_intern( "transform" ); s_yaml_new = rb_intern("yaml_new"); s_yaml_initialize = rb_intern("yaml_initialize"); s_tags = rb_intern("@tags"); s_name = rb_intern("@name"); s_options = rb_intern("@options"); s_kind = rb_intern("@kind"); s_type_id = rb_intern("@type_id"); s_type_id_set = rb_intern("type_id="); s_resolver = rb_intern("@resolver"); s_level = rb_intern( "@level" ); s_style = rb_intern("@style"); s_style_set = rb_intern("style="); s_value = rb_intern("@value"); s_value_set = rb_intern("value="); s_out = rb_intern("@out"); s_input = rb_intern("@input"); sym_model = ID2SYM(rb_intern("Model")); sym_generic = ID2SYM(rb_intern("Generic")); sym_bytecode = ID2SYM(rb_intern("bytecode")); sym_map = ID2SYM(rb_intern("map")); sym_scalar = ID2SYM(rb_intern("scalar")); sym_seq = ID2SYM(rb_intern("seq")); sym_1quote = ID2SYM(rb_intern("quote1")); sym_2quote = ID2SYM(rb_intern("quote2")); sym_fold = ID2SYM(rb_intern("fold")); sym_literal = ID2SYM(rb_intern("literal")); sym_plain = ID2SYM(rb_intern("plain")); sym_inline = ID2SYM(rb_intern("inline")); /* * Define YAML::Syck::Resolver class */ cResolver = rb_define_class_under( rb_syck, "Resolver", rb_cObject ); rb_define_attr( cResolver, "tags", 1, 1 ); rb_define_method( cResolver, "initialize", syck_resolver_initialize, 0 ); rb_define_method( cResolver, "add_type", syck_resolver_add_type, 2 ); rb_define_method( cResolver, "use_types_at", syck_resolver_use_types_at, 1 ); rb_define_method( cResolver, "detect_implicit", syck_resolver_detect_implicit, 1 ); rb_define_method( cResolver, "transfer", syck_resolver_transfer, 2 ); rb_define_method( cResolver, "node_import", syck_resolver_node_import, 1 ); rb_define_method( cResolver, "tagurize", syck_resolver_tagurize, 1 ); oDefaultResolver = rb_funcall( cResolver, rb_intern( "new" ), 0 ); rb_define_singleton_method( oDefaultResolver, "node_import", syck_defaultresolver_node_import, 1 ); rb_define_singleton_method( oDefaultResolver, "detect_implicit", syck_defaultresolver_detect_implicit, 1 ); rb_define_const( rb_syck, "DefaultResolver", oDefaultResolver ); oGenericResolver = rb_funcall( cResolver, rb_intern( "new" ), 0 ); rb_define_singleton_method( oGenericResolver, "node_import", syck_genericresolver_node_import, 1 ); rb_define_const( rb_syck, "GenericResolver", oGenericResolver ); /* * Define YAML::Syck::Parser class */ cParser = rb_define_class_under( rb_syck, "Parser", rb_cObject ); rb_define_attr( cParser, "options", 1, 1 ); rb_define_attr( cParser, "resolver", 1, 1 ); rb_define_attr( cParser, "input", 1, 1 ); rb_define_alloc_func( cParser, syck_parser_s_alloc ); rb_define_method(cParser, "initialize", syck_parser_initialize, -1 ); rb_define_method(cParser, "bufsize=", syck_parser_bufsize_set, 1 ); rb_define_method(cParser, "bufsize", syck_parser_bufsize_get, 0 ); rb_define_method(cParser, "load", syck_parser_load, -1); rb_define_method(cParser, "load_documents", syck_parser_load_documents, -1); rb_define_method(cParser, "set_resolver", syck_parser_set_resolver, 1); /* * Define YAML::Syck::Node class */ cNode = rb_define_class_under( rb_syck, "Node", rb_cObject ); rb_define_method( cNode, "initialize_copy", syck_node_init_copy, 1 ); rb_define_attr( cNode, "emitter", 1, 1 ); rb_define_attr( cNode, "resolver", 1, 1 ); rb_define_attr( cNode, "kind", 1, 0 ); rb_define_attr( cNode, "type_id", 1, 0 ); rb_define_attr( cNode, 
"value", 1, 0 ); rb_define_method( cNode, "type_id=", syck_node_type_id_set, 1 ); rb_define_method( cNode, "transform", syck_node_transform, 0); /* * Define YAML::Syck::Scalar, YAML::Syck::Seq, YAML::Syck::Map -- * all are the publicly usable variants of YAML::Syck::Node */ cScalar = rb_define_class_under( rb_syck, "Scalar", cNode ); rb_define_alloc_func( cScalar, syck_scalar_alloc ); rb_define_attr( cNode, "value", 1, 0 ); rb_define_method( cScalar, "initialize", syck_scalar_initialize, 3 ); rb_define_method( cScalar, "value=", syck_scalar_value_set, 1 ); rb_define_method( cScalar, "style=", syck_scalar_style_set, 1 ); cSeq = rb_define_class_under( rb_syck, "Seq", cNode ); rb_define_alloc_func( cSeq, syck_seq_alloc ); rb_define_method( cSeq, "initialize", syck_seq_initialize, 3 ); rb_define_method( cSeq, "value=", syck_seq_value_set, 1 ); rb_define_method( cSeq, "add", syck_seq_add_m, 1 ); rb_define_method( cSeq, "style=", syck_seq_style_set, 1 ); cMap = rb_define_class_under( rb_syck, "Map", cNode ); rb_define_alloc_func( cMap, syck_map_alloc ); rb_define_method( cMap, "initialize", syck_map_initialize, 3 ); rb_define_method( cMap, "value=", syck_map_value_set, 1 ); rb_define_method( cMap, "add", syck_map_add_m, 2 ); rb_define_method( cMap, "style=", syck_map_style_set, 1 ); /* * Define YAML::PrivateType class */ cPrivateType = rb_define_class_under( rb_yaml, "PrivateType", rb_cObject ); rb_define_attr( cPrivateType, "type_id", 1, 1 ); rb_define_attr( cPrivateType, "value", 1, 1 ); rb_define_method( cPrivateType, "initialize", syck_privatetype_initialize, 2); /* * Define YAML::DomainType class */ cDomainType = rb_define_class_under( rb_yaml, "DomainType", rb_cObject ); rb_define_attr( cDomainType, "domain", 1, 1 ); rb_define_attr( cDomainType, "type_id", 1, 1 ); rb_define_attr( cDomainType, "value", 1, 1 ); rb_define_method( cDomainType, "initialize", syck_domaintype_initialize, 3); /* * Define YAML::Object class */ cYObject = rb_define_class_under( rb_yaml, "Object", rb_cObject ); rb_define_attr( cYObject, "class", 1, 1 ); rb_define_attr( cYObject, "ivars", 1, 1 ); rb_define_method( cYObject, "initialize", syck_yobject_initialize, 2); rb_define_method( cYObject, "yaml_initialize", syck_yobject_initialize, 2); /* * Define YAML::Syck::BadAlias class */ cBadAlias = rb_define_class_under( rb_syck, "BadAlias", rb_cObject ); rb_define_attr( cBadAlias, "name", 1, 1 ); rb_define_method( cBadAlias, "initialize", syck_badalias_initialize, 1); rb_define_method( cBadAlias, "<=>", syck_badalias_cmp, 1); rb_include_module( cBadAlias, rb_const_get( rb_cObject, rb_intern("Comparable") ) ); /* * Define YAML::Syck::MergeKey class */ cMergeKey = rb_define_class_under( rb_syck, "MergeKey", rb_cObject ); /* * Define YAML::Syck::DefaultKey class */ cDefaultKey = rb_define_class_under( rb_syck, "DefaultKey", rb_cObject ); /* * Define YAML::Syck::Out classes */ cOut = rb_define_class_under( rb_syck, "Out", rb_cObject ); rb_define_attr( cOut, "emitter", 1, 1 ); rb_define_method( cOut, "initialize", syck_out_initialize, 1 ); rb_define_method( cOut, "map", syck_out_map, -1 ); rb_define_method( cOut, "seq", syck_out_seq, -1 ); rb_define_method( cOut, "scalar", syck_out_scalar, -1 ); /* * Define YAML::Syck::Emitter class */ cEmitter = rb_define_class_under( rb_syck, "Emitter", rb_cObject ); rb_define_attr( cEmitter, "level", 1, 1 ); rb_define_alloc_func( cEmitter, syck_emitter_s_alloc ); rb_define_method( cEmitter, "initialize", syck_emitter_reset, -1 ); rb_define_method( cEmitter, "reset", syck_emitter_reset, -1 
); rb_define_method( cEmitter, "emit", syck_emitter_emit, -1 ); rb_define_method( cEmitter, "set_resolver", syck_emitter_set_resolver, 1); rb_define_method( cEmitter, "node_export", syck_emitter_node_export, 1); } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/ext/syck/bytecode.c0000644000000000000000000005306511672453175025367 0ustar rootroot/* Generated by re2c 0.9.3 on Tue Apr 12 20:34:14 2005 */ #line 1 "bytecode.re" /* * bytecode.re * * $Author: why $ * $Date: 2005/04/13 06:27:54 $ * * Copyright (C) 2003 why the lucky stiff */ #include "syck.h" #include "gram.h" #define QUOTELEN 128 /* * They do my bidding... */ #define YYCTYPE char #define YYCURSOR parser->cursor #define YYMARKER parser->marker #define YYLIMIT parser->limit #define YYTOKEN parser->token #define YYTOKTMP parser->toktmp #define YYLINEPTR parser->lineptr #define YYLINECTPTR parser->linectptr #define YYLINE parser->linect #define YYFILL(n) syck_parser_read(parser) extern SyckParser *syck_parser_ptr; char *get_inline( SyckParser *parser ); /* * Repositions the cursor at `n' offset from the token start. * Only works in `Header' and `Document' sections. */ #define YYPOS(n) YYCURSOR = YYTOKEN + n /* * Track line numbers */ #define CHK_NL(ptr) if ( *( ptr - 1 ) == '\n' && ptr > YYLINECTPTR ) { YYLINEPTR = ptr; YYLINE++; YYLINECTPTR = YYLINEPTR; } /* * I like seeing the level operations as macros... */ #define ADD_LEVEL(len, status) syck_parser_add_level( parser, len, status ) #define POP_LEVEL() syck_parser_pop_level( parser ) #define CURRENT_LEVEL() syck_parser_current_level( parser ) /* * Force a token next time around sycklex() */ #define FORCE_NEXT_TOKEN(tok) parser->force_token = tok; /* * Adding levels in bytecode requires us to make sure * we've got all our tokens worked out. */ #define ADD_BYTE_LEVEL(lvl, len, s ) \ switch ( lvl->status ) \ { \ case syck_lvl_seq: \ lvl->ncount++; \ ADD_LEVEL(len, syck_lvl_open); \ YYPOS(0); \ return '-'; \ \ case syck_lvl_map: \ lvl->ncount++; \ ADD_LEVEL(len, s); \ break; \ \ case syck_lvl_open: \ lvl->status = s; \ break; \ \ default: \ ADD_LEVEL(len, s); \ break; \ } /* * Nice little macro to ensure we're YAML_IOPENed to the current level. * * Only use this macro in the "Document" section * */ #define ENSURE_YAML_IOPEN(last_lvl, lvl_type, to_len, reset) \ if ( last_lvl->spaces < to_len ) \ { \ if ( last_lvl->status == syck_lvl_iseq || last_lvl->status == syck_lvl_imap ) \ { \ goto Document; \ } \ else \ { \ ADD_LEVEL( to_len, lvl_type ); \ if ( reset == 1 ) YYPOS(0); \ return YAML_IOPEN; \ } \ } /* * Nice little macro to ensure closure of levels. 
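 * (When the current level is indented deeper than `to_len', the level is popped, the cursor is rewound to the token start via YYPOS(0), and a YAML_IEND token is returned.)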
* * Only use this macro in the "Document" section * */ #define ENSURE_YAML_IEND(last_lvl, to_len) \ if ( last_lvl->spaces > to_len ) \ { \ syck_parser_pop_level( parser ); \ YYPOS(0); \ return YAML_IEND; \ } /* * Concatenates string items and manages allocation * to the string */ #define CAT(s, c, i, l) \ { \ if ( i + 1 >= c ) \ { \ c += QUOTELEN; \ S_REALLOC_N( s, char, c ); \ } \ s[i++] = l; \ s[i] = '\0'; \ } /* * Parser for standard YAML Bytecode [UTF-8] */ int sycklex_bytecode_utf8( YYSTYPE *sycklval, SyckParser *parser ) { SyckLevel *lvl; int doc_level = 0; syck_parser_ptr = parser; if ( YYCURSOR == NULL ) { syck_parser_read( parser ); } if ( parser->force_token != 0 ) { int t = parser->force_token; parser->force_token = 0; return t; } #line 173 "bytecode.re" lvl = CURRENT_LEVEL(); if ( lvl->status == syck_lvl_doc ) { goto Document; } Header: YYTOKEN = YYCURSOR; #line 7 "" { YYCTYPE yych; unsigned int yyaccept; goto yy0; yy1: ++YYCURSOR; yy0: if((YYLIMIT - YYCURSOR) < 3) YYFILL(3); yych = *YYCURSOR; switch(yych){ case '\000': goto yy2; case 'D': goto yy3; default: goto yy5; } yy2: YYCURSOR = YYMARKER; switch(yyaccept){ case 0: goto yy4; } yy3: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case '\n': goto yy6; case '\r': goto yy8; default: goto yy4; } yy4: #line 200 "bytecode.re" { YYPOS(0); goto Document; } #line 37 "" yy5: yych = *++YYCURSOR; goto yy4; yy6: ++YYCURSOR; goto yy7; yy7: #line 187 "bytecode.re" { if ( lvl->status == syck_lvl_header ) { CHK_NL(YYCURSOR); goto Directive; } else { ENSURE_YAML_IEND(lvl, -1); YYPOS(0); return 0; } } #line 56 "" yy8: ++YYCURSOR; switch((yych = *YYCURSOR)) { case '\n': goto yy6; default: goto yy2; } } #line 204 "bytecode.re" Document: { lvl = CURRENT_LEVEL(); if ( lvl->status == syck_lvl_header ) { lvl->status = syck_lvl_doc; } YYTOKEN = YYCURSOR; #line 65 "" { YYCTYPE yych; unsigned int yyaccept; goto yy9; yy10: ++YYCURSOR; yy9: if((YYLIMIT - YYCURSOR) < 3) YYFILL(3); yych = *YYCURSOR; switch(yych){ case '\000': goto yy30; case '\n': goto yy27; case '\r': goto yy29; case 'A': goto yy19; case 'D': goto yy12; case 'E': goto yy16; case 'M': goto yy14; case 'P': goto yy13; case 'Q': goto yy15; case 'R': goto yy21; case 'S': goto yy17; case 'T': goto yy23; case 'c': goto yy25; default: goto yy11; } yy11:yy12: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy41; case '\r': goto yy44; default: goto yy11; } yy13: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy41; case '\r': goto yy43; default: goto yy11; } yy14: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy38; case '\r': goto yy40; default: goto yy11; } yy15: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy35; case '\r': goto yy37; default: goto yy11; } yy16: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy32; case '\r': goto yy34; default: goto yy11; } yy17: ++YYCURSOR; goto yy18; yy18: #line 289 "bytecode.re" { ADD_BYTE_LEVEL(lvl, lvl->spaces + 1, syck_lvl_str); goto Scalar; } #line 127 "" yy19: ++YYCURSOR; goto yy20; yy20: #line 293 "bytecode.re" { ADD_BYTE_LEVEL(lvl, lvl->spaces + 1, syck_lvl_open); sycklval->name = get_inline( parser ); syck_hdlr_remove_anchor( parser, sycklval->name ); CHK_NL(YYCURSOR); return YAML_ANCHOR; } #line 138 "" yy21: ++YYCURSOR; goto yy22; yy22: #line 300 "bytecode.re" { ADD_BYTE_LEVEL(lvl, lvl->spaces + 1, syck_lvl_str); sycklval->name = get_inline( parser ); POP_LEVEL(); if ( *( YYCURSOR - 1 ) == '\n' ) YYCURSOR--; return YAML_ALIAS; } #line 149 "" yy23: ++YYCURSOR; goto yy24; yy24: #line 307 "bytecode.re" { char *qstr; 
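/* 'T' bytecode instruction: read the inline tag string; a leading '!' is handled as a transfer method (with optional '^' domain prefixing against the current level), otherwise the string becomes a YAML_TAGURI token. */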
ADD_BYTE_LEVEL(lvl, lvl->spaces + 1, syck_lvl_open); qstr = get_inline( parser ); CHK_NL(YYCURSOR); if ( qstr[0] == '!' ) { int qidx = strlen( qstr ); if ( qstr[1] == '\0' ) { free( qstr ); return YAML_ITRANSFER; } lvl = CURRENT_LEVEL(); /* * URL Prefixing */ if ( qstr[1] == '^' ) { sycklval->name = S_ALLOC_N( char, qidx + strlen( lvl->domain ) ); sycklval->name[0] = '\0'; strcat( sycklval->name, lvl->domain ); strncat( sycklval->name, qstr + 2, qidx - 2 ); free( qstr ); } else { char *carat = qstr + 1; char *qend = qstr + qidx; while ( (++carat) < qend ) { if ( *carat == '^' ) break; } if ( carat < qend ) { free( lvl->domain ); lvl->domain = syck_strndup( qstr + 1, carat - ( qstr + 1 ) ); sycklval->name = S_ALLOC_N( char, ( qend - carat ) + strlen( lvl->domain ) ); sycklval->name[0] = '\0'; strcat( sycklval->name, lvl->domain ); strncat( sycklval->name, carat + 1, ( qend - carat ) - 1 ); free( qstr ); } else { sycklval->name = S_ALLOC_N( char, strlen( qstr ) ); sycklval->name[0] = '\0'; S_MEMCPY( sycklval->name, qstr + 1, char, strlen( qstr ) ); free( qstr ); } } return YAML_TRANSFER; } sycklval->name = qstr; return YAML_TAGURI; } #line 213 "" yy25: ++YYCURSOR; goto yy26; yy26: #line 367 "bytecode.re" { goto Comment; } #line 219 "" yy27: ++YYCURSOR; goto yy28; yy28: #line 369 "bytecode.re" { CHK_NL(YYCURSOR); if ( lvl->status == syck_lvl_seq ) { return YAML_INDENT; } else if ( lvl->status == syck_lvl_map ) { if ( lvl->ncount % 2 == 1 ) return ':'; else return YAML_INDENT; } goto Document; } #line 236 "" yy29: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy27; default: goto yy11; } yy30: ++YYCURSOR; goto yy31; yy31: #line 382 "bytecode.re" { ENSURE_YAML_IEND(lvl, -1); YYPOS(0); return 0; } #line 250 "" yy32: ++YYCURSOR; goto yy33; yy33: #line 253 "bytecode.re" { if ( lvl->status == syck_lvl_seq && lvl->ncount == 0 ) { lvl->ncount++; YYPOS(0); FORCE_NEXT_TOKEN( ']' ); return '['; } else if ( lvl->status == syck_lvl_map && lvl->ncount == 0 ) { lvl->ncount++; YYPOS(0); FORCE_NEXT_TOKEN( '}' ); return '{'; } POP_LEVEL(); lvl = CURRENT_LEVEL(); if ( lvl->status == syck_lvl_seq ) { FORCE_NEXT_TOKEN(YAML_INDENT); } else if ( lvl->status == syck_lvl_map ) { if ( lvl->ncount % 2 == 1 ) { FORCE_NEXT_TOKEN(':'); } else { FORCE_NEXT_TOKEN(YAML_INDENT); } } CHK_NL(YYCURSOR); return YAML_IEND; } #line 290 "" yy34: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy32; default: goto yy11; } yy35: ++YYCURSOR; goto yy36; yy36: #line 238 "bytecode.re" { int complex = 0; if ( lvl->ncount % 2 == 0 && ( lvl->status == syck_lvl_map || lvl->status == syck_lvl_seq ) ) { complex = 1; } ADD_BYTE_LEVEL(lvl, lvl->spaces + 1, syck_lvl_seq); CHK_NL(YYCURSOR); if ( complex ) { FORCE_NEXT_TOKEN( YAML_IOPEN ); return '?'; } return YAML_IOPEN; } #line 314 "" yy37: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy35; default: goto yy11; } yy38: ++YYCURSOR; goto yy39; yy39: #line 223 "bytecode.re" { int complex = 0; if ( lvl->ncount % 2 == 0 && ( lvl->status == syck_lvl_map || lvl->status == syck_lvl_seq ) ) { complex = 1; } ADD_BYTE_LEVEL(lvl, lvl->spaces + 1, syck_lvl_map); CHK_NL(YYCURSOR); if ( complex ) { FORCE_NEXT_TOKEN( YAML_IOPEN ); return '?'; } return YAML_IOPEN; } #line 338 "" yy40: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy38; default: goto yy11; } yy41: ++YYCURSOR; goto yy42; yy42: #line 218 "bytecode.re" { ENSURE_YAML_IEND(lvl, -1); YYPOS(0); return 0; } #line 352 "" yy43: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy41; default: goto yy11; } yy44: ++YYCURSOR; switch((yych = 
*YYCURSOR)) { case '\n': goto yy41; default: goto yy11; } } #line 387 "bytecode.re" } Directive: { YYTOKEN = YYCURSOR; #line 366 "" { YYCTYPE yych; unsigned int yyaccept; goto yy45; yy46: ++YYCURSOR; yy45: if((YYLIMIT - YYCURSOR) < 2) YYFILL(2); yych = *YYCURSOR; switch(yych){ case '\000': goto yy47; case 'V': goto yy48; default: goto yy50; } yy47: YYCURSOR = YYMARKER; switch(yyaccept){ case 0: goto yy49; } yy48: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case '.': case '/': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case ':': case ';': case '<': case '=': case '>': case '?': case '@': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '[': case '\\': case ']': case '^': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy51; default: goto yy49; } yy49: #line 400 "bytecode.re" { YYCURSOR = YYTOKEN; return YAML_DOCSEP; } #line 469 "" yy50: yych = *++YYCURSOR; goto yy49; yy51: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy52; yy52: switch(yych){ case '.': case '/': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case ';': case '<': case '=': case '>': case '?': case '@': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '[': case '\\': case ']': case '^': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy51; case ':': goto yy53; default: goto yy47; } yy53: yych = *++YYCURSOR; switch(yych){ case '.': case '/': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case ':': case ';': case '<': case '=': case '>': case '?': case '@': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '[': case '\\': case ']': case '^': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy54; default: goto yy47; } yy54: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy55; yy55: switch(yych){ case '\n': goto yy56; case '\r': goto yy58; case '.': case '/': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case ':': case ';': case '<': case '=': case '>': case '?': case '@': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': 
case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '[': case '\\': case ']': case '^': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy54; default: goto yy47; } yy56: ++YYCURSOR; goto yy57; yy57: #line 397 "bytecode.re" { CHK_NL(YYCURSOR); goto Directive; } #line 724 "" yy58: ++YYCURSOR; switch((yych = *YYCURSOR)) { case '\n': goto yy56; default: goto yy47; } } #line 403 "bytecode.re" } Comment: { YYTOKEN = YYCURSOR; #line 733 "" { YYCTYPE yych; unsigned int yyaccept; goto yy59; yy60: ++YYCURSOR; yy59: if((YYLIMIT - YYCURSOR) < 2) YYFILL(2); yych = *YYCURSOR; switch(yych){ case '\000': goto yy61; case '\n': goto yy62; case '\r': goto yy64; default: goto yy66; } yy61:yy62: ++YYCURSOR; goto yy63; yy63: #line 413 "bytecode.re" { CHK_NL(YYCURSOR); goto Document; } #line 754 "" yy64: ++YYCURSOR; switch((yych = *YYCURSOR)) { case '\n': goto yy67; default: goto yy65; } yy65: #line 416 "bytecode.re" { goto Comment; } #line 763 "" yy66: yych = *++YYCURSOR; goto yy65; yy67: ++YYCURSOR; yych = *YYCURSOR; goto yy63; } #line 418 "bytecode.re" } Scalar: { int idx = 0; int cap = 100; char *str = S_ALLOC_N( char, cap ); char *tok; str[0] = '\0'; Scalar2: tok = YYCURSOR; #line 771 "" { YYCTYPE yych; unsigned int yyaccept; goto yy68; yy69: ++YYCURSOR; yy68: if((YYLIMIT - YYCURSOR) < 3) YYFILL(3); yych = *YYCURSOR; switch(yych){ case '\000': goto yy74; case '\n': goto yy70; case '\r': goto yy72; default: goto yy76; } yy70: ++YYCURSOR; switch((yych = *YYCURSOR)) { case 'C': goto yy78; case 'N': goto yy80; case 'Z': goto yy83; default: goto yy71; } yy71: #line 462 "bytecode.re" { YYCURSOR = tok; goto ScalarEnd; } #line 798 "" yy72: ++YYCURSOR; switch((yych = *YYCURSOR)) { case '\n': goto yy77; default: goto yy73; } yy73: #line 470 "bytecode.re" { CAT(str, cap, idx, tok[0]); goto Scalar2; } #line 809 "" yy74: ++YYCURSOR; goto yy75; yy75: #line 466 "bytecode.re" { YYCURSOR = tok; goto ScalarEnd; } #line 817 "" yy76: yych = *++YYCURSOR; goto yy73; yy77: yych = *++YYCURSOR; switch(yych){ case 'C': goto yy78; case 'N': goto yy80; case 'Z': goto yy83; default: goto yy71; } yy78: ++YYCURSOR; goto yy79; yy79: #line 436 "bytecode.re" { CHK_NL(tok+1); goto Scalar2; } #line 833 "" yy80: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy81; yy81: switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy80; default: goto yy82; } yy82: #line 439 "bytecode.re" { CHK_NL(tok+1); if ( tok + 2 < YYCURSOR ) { char *count = tok + 2; int total = strtod( count, NULL ); int i; for ( i = 0; i < total; i++ ) { CAT(str, cap, idx, '\n'); } } else { CAT(str, cap, idx, '\n'); } goto Scalar2; } #line 871 "" yy83: ++YYCURSOR; goto yy84; yy84: #line 457 "bytecode.re" { CHK_NL(tok+1); CAT(str, cap, idx, '\0'); goto Scalar2; } #line 880 "" } #line 474 "bytecode.re" ScalarEnd: { SyckNode *n = syck_alloc_str(); n->data.str->ptr = str; n->data.str->len = idx; sycklval->nodeData = n; POP_LEVEL(); if ( parser->implicit_typing == 1 ) { try_tag_implicit( sycklval->nodeData, parser->taguri_expansion ); } return YAML_PLAIN; } } } char * get_inline( SyckParser *parser ) { int idx = 0; int cap = 100; char *str = 
S_ALLOC_N( char, cap ); char *tok; str[0] = '\0'; Inline: { tok = YYCURSOR; #line 884 "" { YYCTYPE yych; unsigned int yyaccept; goto yy85; yy86: ++YYCURSOR; yy85: if((YYLIMIT - YYCURSOR) < 2) YYFILL(2); yych = *YYCURSOR; switch(yych){ case '\000': goto yy91; case '\n': goto yy87; case '\r': goto yy89; default: goto yy93; } yy87: ++YYCURSOR; goto yy88; yy88: #line 509 "bytecode.re" { CHK_NL(YYCURSOR); return str; } #line 905 "" yy89: ++YYCURSOR; switch((yych = *YYCURSOR)) { case '\n': goto yy94; default: goto yy90; } yy90: #line 516 "bytecode.re" { CAT(str, cap, idx, tok[0]); goto Inline; } #line 916 "" yy91: ++YYCURSOR; goto yy92; yy92: #line 512 "bytecode.re" { YYCURSOR = tok; return str; } #line 924 "" yy93: yych = *++YYCURSOR; goto yy90; yy94: ++YYCURSOR; yych = *YYCURSOR; goto yy88; } #line 520 "bytecode.re" } } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/ext/syck/implicit.c0000644000000000000000000013171511672453175025402 0ustar rootroot/* Generated by re2c 0.9.3 on Wed Mar 30 08:27:25 2005 */ #line 1 "implicit.re" /* * implicit.re * * $Author: why $ * $Date: 2005/04/06 17:18:59 $ * * Copyright (C) 2003 why the lucky stiff */ #include "syck.h" #define YYCTYPE char #define YYCURSOR cursor #define YYMARKER marker #define YYLIMIT limit #define YYFILL(n) void try_tag_implicit( SyckNode *n, int taguri ) { char *tid = ""; switch ( n->kind ) { case syck_str_kind: tid = syck_match_implicit( n->data.str->ptr, n->data.str->len ); break; case syck_seq_kind: tid = "seq"; break; case syck_map_kind: tid = "map"; break; } if ( n->type_id != NULL ) S_FREE( n->type_id ); if ( taguri == 1 ) { n->type_id = syck_taguri( YAML_DOMAIN, tid, strlen( tid ) ); } else { n->type_id = syck_strndup( tid, strlen( tid ) ); } } char *syck_match_implicit( char *str, size_t len ) { char *cursor, *limit, *marker; cursor = str; limit = str + len; #line 6 "" { YYCTYPE yych; unsigned int yyaccept; goto yy0; yy1: ++YYCURSOR; yy0: if((YYLIMIT - YYCURSOR) < 26) YYFILL(26); yych = *YYCURSOR; switch(yych){ case '\000': goto yy6; case '+': goto yy16; case '-': goto yy17; case '.': goto yy20; case '0': goto yy18; case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy19; case '<': goto yy22; case '=': goto yy21; case 'F': goto yy15; case 'N': goto yy5; case 'O': goto yy13; case 'T': goto yy11; case 'Y': goto yy9; case 'f': goto yy14; case 'n': goto yy4; case 'o': goto yy12; case 't': goto yy10; case 'y': goto yy8; case '~': goto yy2; default: goto yy23; } yy2: ++YYCURSOR; if((yych = *YYCURSOR) <= '\000') goto yy6; goto yy3; yy3: #line 123 "implicit.re" { return "str"; } #line 51 "" yy4: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case 'o': goto yy172; case 'u': goto yy200; default: goto yy3; } yy5: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case 'O': case 'o': goto yy172; case 'U': goto yy195; case 'u': goto yy196; default: goto yy3; } yy6: ++YYCURSOR; goto yy7; yy7: #line 85 "implicit.re" { return "null"; } #line 72 "" yy8: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case 'e': goto yy194; default: goto yy3; } yy9: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case 'E': goto yy192; case 'e': goto yy193; default: goto yy3; } yy10: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case 'r': goto yy190; default: goto yy3; } yy11: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case 'R': goto yy186; case 'r': goto yy187; default: goto yy3; } yy12: yyaccept = 0; yych = *(YYMARKER = 
++YYCURSOR); switch(yych){ case 'f': goto yy185; case 'n': goto yy182; default: goto yy3; } yy13: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case 'F': goto yy180; case 'N': case 'n': goto yy182; case 'f': goto yy181; default: goto yy3; } yy14: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case 'a': goto yy177; default: goto yy3; } yy15: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case 'A': goto yy168; case 'a': goto yy169; default: goto yy3; } yy16: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case '.': goto yy167; case '0': goto yy158; case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy47; default: goto yy3; } yy17: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case '.': goto yy157; case '0': goto yy158; case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy47; default: goto yy3; } yy18: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case '\000': goto yy52; case ',': goto yy142; case '.': goto yy50; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': goto yy140; case '8': case '9': goto yy141; case ':': goto yy49; case 'x': goto yy144; default: goto yy3; } yy19: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case '\000': goto yy52; case ',': goto yy47; case '.': goto yy50; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy46; case ':': goto yy49; default: goto yy3; } yy20: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case 'I': goto yy33; case 'N': goto yy31; case 'i': goto yy32; case 'n': goto yy30; default: goto yy3; } yy21: yych = *++YYCURSOR; if(yych <= '\000') goto yy28; goto yy3; yy22: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case '<': goto yy24; default: goto yy3; } yy23: yych = *++YYCURSOR; goto yy3; yy24: yych = *++YYCURSOR; if(yych <= '\000') goto yy26; goto yy25; yy25: YYCURSOR = YYMARKER; switch(yyaccept){ case 0: goto yy3; } yy26: ++YYCURSOR; goto yy27; yy27: #line 121 "implicit.re" { return "merge"; } #line 230 "" yy28: ++YYCURSOR; goto yy29; yy29: #line 119 "implicit.re" { return "default"; } #line 236 "" yy30: yych = *++YYCURSOR; switch(yych){ case 'a': goto yy45; default: goto yy25; } yy31: yych = *++YYCURSOR; switch(yych){ case 'A': goto yy40; case 'a': goto yy41; default: goto yy25; } yy32: yych = *++YYCURSOR; switch(yych){ case 'n': goto yy39; default: goto yy25; } yy33: yych = *++YYCURSOR; switch(yych){ case 'N': goto yy34; case 'n': goto yy35; default: goto yy25; } yy34: yych = *++YYCURSOR; switch(yych){ case 'F': goto yy36; default: goto yy25; } yy35: yych = *++YYCURSOR; switch(yych){ case 'f': goto yy36; default: goto yy25; } yy36: yych = *++YYCURSOR; if(yych >= '\001') goto yy25; goto yy37; yy37: ++YYCURSOR; goto yy38; yy38: #line 105 "implicit.re" { return "float#inf"; } #line 277 "" yy39: yych = *++YYCURSOR; switch(yych){ case 'f': goto yy36; default: goto yy25; } yy40: yych = *++YYCURSOR; switch(yych){ case 'N': goto yy42; default: goto yy25; } yy41: yych = *++YYCURSOR; switch(yych){ case 'N': goto yy42; default: goto yy25; } yy42: yych = *++YYCURSOR; if(yych >= '\001') goto yy25; goto yy43; yy43: ++YYCURSOR; goto yy44; yy44: #line 109 "implicit.re" { return "float#nan"; } #line 301 "" yy45: yych = *++YYCURSOR; switch(yych){ case 'n': goto yy42; default: goto yy25; } yy46: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': 
case '4': case '5': case '6': case '7': case '8': case '9': goto yy74; default: goto yy48; } yy47: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy48; yy48: switch(yych){ case '\000': goto yy52; case ',': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy47; case '.': goto yy50; case ':': goto yy49; default: goto yy25; } yy49: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': goto yy66; case '6': case '7': case '8': case '9': goto yy67; default: goto yy25; } yy50: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy51; yy51: switch(yych){ case '\000': goto yy56; case ',': goto yy54; case '.': goto yy58; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy50; case 'E': case 'e': goto yy60; default: goto yy25; } yy52: ++YYCURSOR; goto yy53; yy53: #line 97 "implicit.re" { return "int"; } #line 386 "" yy54: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy55; yy55: switch(yych){ case '\000': goto yy56; case ',': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy54; default: goto yy25; } yy56: ++YYCURSOR; goto yy57; yy57: #line 99 "implicit.re" { return "float#fix"; } #line 411 "" yy58: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy59; yy59: switch(yych){ case '.': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy58; case 'E': case 'e': goto yy60; default: goto yy25; } yy60: yych = *++YYCURSOR; switch(yych){ case '+': case '-': goto yy61; default: goto yy25; } yy61: yych = *++YYCURSOR; if(yych <= '\000') goto yy25; goto yy63; yy62: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy63; yy63: switch(yych){ case '\000': goto yy64; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy62; default: goto yy25; } yy64: ++YYCURSOR; goto yy65; yy65: #line 101 "implicit.re" { return "float#exp"; } #line 463 "" yy66: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; switch(yych){ case '\000': goto yy70; case '.': goto yy68; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy67; case ':': goto yy49; default: goto yy25; } yy67: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; switch(yych){ case '\000': goto yy70; case '.': goto yy68; case ':': goto yy49; default: goto yy25; } yy68: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy69; yy69: switch(yych){ case '\000': goto yy72; case ',': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy68; default: goto yy25; } yy70: ++YYCURSOR; goto yy71; yy71: #line 95 "implicit.re" { return "int#base60"; } #line 518 "" yy72: ++YYCURSOR; goto yy73; yy73: #line 103 "implicit.re" { return "float#base60"; } #line 524 "" yy74: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy75; default: goto yy48; } yy75: yych = *++YYCURSOR; switch(yych){ case '-': goto yy76; default: goto yy48; } yy76: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy77; default: goto 
yy25; } yy77: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy78; default: goto yy25; } yy78: yych = *++YYCURSOR; switch(yych){ case '-': goto yy79; default: goto yy25; } yy79: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy80; default: goto yy25; } yy80: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy81; default: goto yy25; } yy81: yych = *++YYCURSOR; switch(yych){ case '\000': goto yy82; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy25; case 'T': goto yy84; case 't': goto yy85; default: goto yy87; } yy82: ++YYCURSOR; goto yy83; yy83: #line 111 "implicit.re" { return "timestamp#ymd"; } #line 627 "" yy84: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy126; default: goto yy25; } yy85: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy108; default: goto yy25; } yy86: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy87; yy87: switch(yych){ case '\t': case ' ': goto yy86; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy88; default: goto yy25; } yy88: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy89; default: goto yy25; } yy89: yych = *++YYCURSOR; switch(yych){ case ':': goto yy90; default: goto yy25; } yy90: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy91; default: goto yy25; } yy91: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy92; default: goto yy25; } yy92: yych = *++YYCURSOR; switch(yych){ case ':': goto yy93; default: goto yy25; } yy93: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy94; default: goto yy25; } yy94: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy95; default: goto yy25; } yy95: yych = *++YYCURSOR; switch(yych){ case '\t': case ' ': goto yy98; case '.': goto yy96; default: goto yy25; } yy96: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy97; yy97: switch(yych){ case '\t': case ' ': goto yy98; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy96; default: goto yy25; } yy98: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy99; yy99: switch(yych){ case '\t': case ' ': goto yy98; case '+': case '-': goto yy101; case 'Z': goto yy100; default: goto yy25; } yy100: yych = *++YYCURSOR; if(yych <= '\000') goto yy105; goto yy25; yy101: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy102; default: goto yy25; } yy102: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': 
case '8': case '9': goto yy103; default: goto yy25; } yy103: yych = *++YYCURSOR; switch(yych){ case '\000': goto yy105; case ':': goto yy104; default: goto yy25; } yy104: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy107; default: goto yy25; } yy105: ++YYCURSOR; goto yy106; yy106: #line 115 "implicit.re" { return "timestamp#spaced"; } #line 847 "" yy107: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy100; default: goto yy25; } yy108: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy109; default: goto yy25; } yy109: yych = *++YYCURSOR; switch(yych){ case ':': goto yy110; default: goto yy25; } yy110: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy111; default: goto yy25; } yy111: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy112; default: goto yy25; } yy112: yych = *++YYCURSOR; switch(yych){ case ':': goto yy113; default: goto yy25; } yy113: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy114; default: goto yy25; } yy114: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy115; default: goto yy25; } yy115: yych = *++YYCURSOR; switch(yych){ case '.': goto yy116; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy25; default: goto yy117; } yy116: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy117; yy117: switch(yych){ case '+': case '-': goto yy119; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy116; case 'Z': goto yy118; default: goto yy25; } yy118: yych = *++YYCURSOR; if(yych <= '\000') goto yy123; goto yy25; yy119: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy120; default: goto yy25; } yy120: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy121; default: goto yy25; } yy121: yych = *++YYCURSOR; switch(yych){ case '\000': goto yy123; case ':': goto yy122; default: goto yy25; } yy122: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy125; default: goto yy25; } yy123: ++YYCURSOR; goto yy124; yy124: #line 113 "implicit.re" { return "timestamp#iso8601"; } #line 1033 "" yy125: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy118; default: goto yy25; } yy126: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy127; default: goto yy25; } yy127: yych = *++YYCURSOR; switch(yych){ case ':': goto yy128; default: goto yy25; } yy128: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy129; 
default: goto yy25; } yy129: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy130; default: goto yy25; } yy130: yych = *++YYCURSOR; switch(yych){ case ':': goto yy131; default: goto yy25; } yy131: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy132; default: goto yy25; } yy132: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy133; default: goto yy25; } yy133: yych = *++YYCURSOR; switch(yych){ case '.': goto yy134; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy25; case 'Z': goto yy136; default: goto yy135; } yy134: ++YYCURSOR; if((YYLIMIT - YYCURSOR) < 7) YYFILL(7); yych = *YYCURSOR; goto yy135; yy135: switch(yych){ case '+': case '-': goto yy119; case '0': goto yy134; case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy138; case 'Z': goto yy118; default: goto yy25; } yy136: yych = *++YYCURSOR; if(yych >= '\001') goto yy25; goto yy137; yy137: yych = *++YYCURSOR; goto yy124; yy138: ++YYCURSOR; if((YYLIMIT - YYCURSOR) < 7) YYFILL(7); yych = *YYCURSOR; goto yy139; yy139: switch(yych){ case '+': case '-': goto yy119; case '0': goto yy134; case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy138; case 'Z': goto yy136; default: goto yy25; } yy140: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': goto yy155; case '8': case '9': goto yy153; default: goto yy143; } yy141: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy153; default: goto yy152; } yy142: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy143; yy143: switch(yych){ case '\000': goto yy149; case ',': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': goto yy142; case '.': goto yy50; case '8': case '9': goto yy151; case ':': goto yy49; default: goto yy25; } yy144: yych = *++YYCURSOR; if(yych <= '\000') goto yy25; goto yy146; yy145: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy146; yy146: switch(yych){ case '\000': goto yy147; case ',': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': goto yy145; default: goto yy25; } yy147: ++YYCURSOR; goto yy148; yy148: #line 91 "implicit.re" { return "int#hex"; } #line 1275 "" yy149: ++YYCURSOR; goto yy150; yy150: #line 93 "implicit.re" { return "int#oct"; } #line 1281 "" yy151: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy152; yy152: switch(yych){ case ',': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy151; case '.': goto yy50; case ':': goto yy49; default: goto yy25; } yy153: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy154; default: goto yy152; } yy154: yych = *++YYCURSOR; switch(yych){ case '-': goto yy76; default: goto yy152; } yy155: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': 
case '3': case '4': case '5': case '6': case '7': goto yy156; case '8': case '9': goto yy154; default: goto yy143; } yy156: yych = *++YYCURSOR; switch(yych){ case '-': goto yy76; default: goto yy143; } yy157: yych = *++YYCURSOR; switch(yych){ case 'I': goto yy160; case 'i': goto yy159; default: goto yy25; } yy158: yych = *++YYCURSOR; switch(yych){ case '\000': goto yy52; case 'x': goto yy144; default: goto yy143; } yy159: yych = *++YYCURSOR; switch(yych){ case 'n': goto yy166; default: goto yy25; } yy160: yych = *++YYCURSOR; switch(yych){ case 'N': goto yy161; case 'n': goto yy162; default: goto yy25; } yy161: yych = *++YYCURSOR; switch(yych){ case 'F': goto yy163; default: goto yy25; } yy162: yych = *++YYCURSOR; switch(yych){ case 'f': goto yy163; default: goto yy25; } yy163: yych = *++YYCURSOR; if(yych >= '\001') goto yy25; goto yy164; yy164: ++YYCURSOR; goto yy165; yy165: #line 107 "implicit.re" { return "float#neginf"; } #line 1381 "" yy166: yych = *++YYCURSOR; switch(yych){ case 'f': goto yy163; default: goto yy25; } yy167: yych = *++YYCURSOR; switch(yych){ case 'I': goto yy33; case 'i': goto yy32; default: goto yy25; } yy168: yych = *++YYCURSOR; switch(yych){ case 'L': goto yy175; default: goto yy25; } yy169: yych = *++YYCURSOR; switch(yych){ case 'l': goto yy170; default: goto yy25; } yy170: yych = *++YYCURSOR; switch(yych){ case 's': goto yy171; default: goto yy25; } yy171: yych = *++YYCURSOR; switch(yych){ case 'e': goto yy172; default: goto yy25; } yy172: yych = *++YYCURSOR; if(yych >= '\001') goto yy25; goto yy173; yy173: ++YYCURSOR; goto yy174; yy174: #line 89 "implicit.re" { return "bool#no"; } #line 1421 "" yy175: yych = *++YYCURSOR; switch(yych){ case 'S': goto yy176; default: goto yy25; } yy176: yych = *++YYCURSOR; switch(yych){ case 'E': goto yy172; default: goto yy25; } yy177: yych = *++YYCURSOR; switch(yych){ case 'l': goto yy178; default: goto yy25; } yy178: yych = *++YYCURSOR; switch(yych){ case 's': goto yy179; default: goto yy25; } yy179: yych = *++YYCURSOR; switch(yych){ case 'e': goto yy172; default: goto yy25; } yy180: yych = *++YYCURSOR; switch(yych){ case 'F': goto yy172; default: goto yy25; } yy181: yych = *++YYCURSOR; switch(yych){ case 'f': goto yy172; default: goto yy25; } yy182: yych = *++YYCURSOR; if(yych >= '\001') goto yy25; goto yy183; yy183: ++YYCURSOR; goto yy184; yy184: #line 87 "implicit.re" { return "bool#yes"; } #line 1465 "" yy185: yych = *++YYCURSOR; switch(yych){ case 'f': goto yy172; default: goto yy25; } yy186: yych = *++YYCURSOR; switch(yych){ case 'U': goto yy189; default: goto yy25; } yy187: yych = *++YYCURSOR; switch(yych){ case 'u': goto yy188; default: goto yy25; } yy188: yych = *++YYCURSOR; switch(yych){ case 'e': goto yy182; default: goto yy25; } yy189: yych = *++YYCURSOR; switch(yych){ case 'E': goto yy182; default: goto yy25; } yy190: yych = *++YYCURSOR; switch(yych){ case 'u': goto yy191; default: goto yy25; } yy191: yych = *++YYCURSOR; switch(yych){ case 'e': goto yy182; default: goto yy25; } yy192: yych = *++YYCURSOR; switch(yych){ case 'S': goto yy182; default: goto yy25; } yy193: yych = *++YYCURSOR; switch(yych){ case 's': goto yy182; default: goto yy25; } yy194: yych = *++YYCURSOR; switch(yych){ case 's': goto yy182; default: goto yy25; } yy195: yych = *++YYCURSOR; switch(yych){ case 'L': goto yy199; default: goto yy25; } yy196: yych = *++YYCURSOR; switch(yych){ case 'l': goto yy197; default: goto yy25; } yy197: yych = *++YYCURSOR; switch(yych){ case 'l': goto yy198; default: goto yy25; } yy198: yych = *++YYCURSOR; if(yych <= 
'\000') goto yy6; goto yy25; yy199: yych = *++YYCURSOR; switch(yych){ case 'L': goto yy198; default: goto yy25; } yy200: yych = *++YYCURSOR; switch(yych){ case 'l': goto yy201; default: goto yy25; } yy201: ++YYCURSOR; switch((yych = *YYCURSOR)) { case 'l': goto yy198; default: goto yy25; } } #line 125 "implicit.re" } /* Remove ending fragment and compare types */ int syck_tagcmp( char *tag1, char *tag2 ) { if ( tag1 == tag2 ) return 1; if ( tag1 == NULL || tag2 == NULL ) return 0; else { int i; char *othorpe; char *tmp1 = syck_strndup( tag1, strlen( tag1 ) ); char *tmp2 = syck_strndup( tag2, strlen( tag2 ) ); othorpe = strstr( tmp1, "#" ); if ( othorpe != NULL ) { othorpe[0] = '\0'; } othorpe = strstr( tmp2, "#" ); if ( othorpe != NULL ) { othorpe[0] = '\0'; } i = strcmp( tmp1, tmp2 ); S_FREE( tmp1 ); S_FREE( tmp2 ); return i; } } char * syck_type_id_to_uri( char *type_id ) { char *cursor, *limit, *marker; cursor = type_id; limit = type_id + strlen( type_id ); #line 1552 "" { YYCTYPE yych; unsigned int yyaccept; goto yy202; yy203: ++YYCURSOR; yy202: if((YYLIMIT - YYCURSOR) < 21) YYFILL(21); yych = *YYCURSOR; switch(yych){ case '\000': goto yy204; case '!': goto yy208; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 'u': case 'v': case 'w': case 'y': case 'z': goto yy210; case 't': goto yy205; case 'x': goto yy207; default: goto yy211; } yy204: YYCURSOR = YYMARKER; switch(yyaccept){ case 0: goto yy206; } yy205: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case ',': goto yy216; case '-': goto yy212; case '.': goto yy217; case '/': goto yy218; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '_': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy214; case 'a': goto yy246; default: goto yy206; } yy206: #line 202 "implicit.re" { return syck_taguri( YAML_DOMAIN, type_id, strlen( type_id ) ); } #line 1700 "" yy207: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case ',': case '.': case '/': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 
't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy215; case '-': goto yy236; default: goto yy206; } yy208: ++YYCURSOR; goto yy209; yy209: #line 176 "implicit.re" { return syck_xprivate( type_id + 1, strlen( type_id ) - 1 ); } #line 1774 "" yy210: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case ',': goto yy216; case '-': goto yy212; case '.': goto yy217; case '/': goto yy218; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy214; default: goto yy206; } yy211: yych = *++YYCURSOR; goto yy206; yy212: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy213; yy213: switch(yych){ case '-': goto yy212; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy214; default: goto yy204; } yy214: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy215; yy215: switch(yych){ case ',': goto yy216; case '-': goto yy212; case '.': goto yy217; case '/': goto yy218; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy214; default: goto yy204; } yy216: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy224; default: goto yy204; } yy217: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': 
case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy220; default: goto yy204; } yy218: ++YYCURSOR; goto yy219; yy219: #line 178 "implicit.re" { char *domain = S_ALLOC_N( char, ( YYCURSOR - type_id ) + 15 ); char *uri; domain[0] = '\0'; strncat( domain, type_id, ( YYCURSOR - type_id ) - 1 ); strcat( domain, "." ); strcat( domain, YAML_DOMAIN ); uri = syck_taguri( domain, YYCURSOR, YYLIMIT - YYCURSOR ); S_FREE( domain ); return uri; } #line 2084 "" yy220: ++YYCURSOR; if((YYLIMIT - YYCURSOR) < 2) YYFILL(2); yych = *YYCURSOR; goto yy221; yy221: switch(yych){ case ',': goto yy216; case '-': goto yy222; case '.': goto yy217; case '/': goto yy218; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy220; default: goto yy204; } yy222: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy223; yy223: switch(yych){ case '-': goto yy222; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy220; default: goto yy204; } yy224: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy225; default: goto yy204; } yy225: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy226; default: goto yy204; } yy226: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy227; default: goto yy204; } yy227: yych = *++YYCURSOR; switch(yych){ case '-': goto yy228; case '/': goto yy229; default: goto yy204; } yy228: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy231; default: goto yy204; } yy229: ++YYCURSOR; goto yy230; yy230: #line 191 "implicit.re" { char *domain = S_ALLOC_N( char, YYCURSOR - type_id ); char *uri; domain[0] = '\0'; strncat( domain, type_id, ( YYCURSOR - type_id ) - 1 ); uri = syck_taguri( domain, YYCURSOR, YYLIMIT - YYCURSOR ); S_FREE( domain ); return uri; } #line 2302 "" yy231: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy232; default: goto yy204; } yy232: yych = *++YYCURSOR; switch(yych){ case '-': goto yy233; case '/': goto yy229; 
default: goto yy204; } yy233: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy234; default: goto yy204; } yy234: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy235; default: goto yy204; } yy235: yych = *++YYCURSOR; switch(yych){ case '/': goto yy229; default: goto yy204; } yy236: yych = *++YYCURSOR; switch(yych){ case 'p': goto yy237; default: goto yy213; } yy237: yych = *++YYCURSOR; switch(yych){ case ',': goto yy216; case '.': goto yy217; case '/': goto yy218; case 'r': goto yy238; default: goto yy213; } yy238: yych = *++YYCURSOR; switch(yych){ case ',': goto yy216; case '.': goto yy217; case '/': goto yy218; case 'i': goto yy239; default: goto yy213; } yy239: yych = *++YYCURSOR; switch(yych){ case ',': goto yy216; case '.': goto yy217; case '/': goto yy218; case 'v': goto yy240; default: goto yy213; } yy240: yych = *++YYCURSOR; switch(yych){ case ',': goto yy216; case '.': goto yy217; case '/': goto yy218; case 'a': goto yy241; default: goto yy213; } yy241: yych = *++YYCURSOR; switch(yych){ case ',': goto yy216; case '.': goto yy217; case '/': goto yy218; case 't': goto yy242; default: goto yy213; } yy242: yych = *++YYCURSOR; switch(yych){ case ',': goto yy216; case '.': goto yy217; case '/': goto yy218; case 'e': goto yy243; default: goto yy213; } yy243: yych = *++YYCURSOR; switch(yych){ case ',': goto yy216; case '.': goto yy217; case '/': goto yy218; case ':': goto yy244; default: goto yy213; } yy244: ++YYCURSOR; goto yy245; yy245: #line 174 "implicit.re" { return type_id; } #line 2422 "" yy246: yych = *++YYCURSOR; switch(yych){ case ',': goto yy216; case '.': goto yy217; case '/': goto yy218; case 'g': goto yy247; default: goto yy213; } yy247: yych = *++YYCURSOR; switch(yych){ case ',': goto yy216; case '.': goto yy217; case '/': goto yy218; case ':': goto yy248; default: goto yy213; } yy248: yych = *++YYCURSOR; switch(yych){ case ',': case '-': case '.': goto yy204; default: goto yy250; } yy249: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy250; yy250: switch(yych){ case ',': goto yy253; case '-': goto yy251; case '.': goto yy254; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy249; default: goto yy204; } yy251: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy252; yy252: switch(yych){ case '-': goto yy251; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 
'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy249; default: goto yy204; } yy253: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy259; default: goto yy204; } yy254: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy255; default: goto yy204; } yy255: ++YYCURSOR; if((YYLIMIT - YYCURSOR) < 2) YYFILL(2); yych = *YYCURSOR; goto yy256; yy256: switch(yych){ case ',': goto yy253; case '-': goto yy257; case '.': goto yy254; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy255; default: goto yy204; } yy257: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy258; yy258: switch(yych){ case '-': goto yy257; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy255; default: goto yy204; } yy259: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy260; default: goto yy204; } yy260: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy261; default: goto yy204; } yy261: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy262; default: goto yy204; } yy262: yych = *++YYCURSOR; switch(yych){ case '-': goto yy263; case ':': goto yy264; default: goto yy204; } yy263: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy266; default: goto 
yy204; } yy264: ++YYCURSOR; goto yy265; yy265: #line 172 "implicit.re" { return type_id; } #line 2874 "" yy266: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy267; default: goto yy204; } yy267: yych = *++YYCURSOR; switch(yych){ case '-': goto yy268; case ':': goto yy264; default: goto yy204; } yy268: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy269; default: goto yy204; } yy269: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy270; default: goto yy204; } yy270: ++YYCURSOR; switch((yych = *YYCURSOR)) { case ':': goto yy264; default: goto yy204; } } #line 204 "implicit.re" } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/ext/syck/token.c0000644000000000000000000016166211672453175024714 0ustar rootroot/* Generated by re2c 0.9.3 on Tue Apr 12 21:11:14 2005 */ #line 1 "token.re" /* * token.re * * $Author: why $ * $Date: 2005/04/13 06:27:54 $ * * Copyright (C) 2003 why the lucky stiff */ #include "syck.h" #include "gram.h" /* * Allocate quoted strings in chunks */ #define QUOTELEN 1024 /* * They do my bidding... */ #define YYCTYPE char #define YYCURSOR parser->cursor #define YYMARKER parser->marker #define YYLIMIT parser->limit #define YYTOKEN parser->token #define YYTOKTMP parser->toktmp #define YYLINEPTR parser->lineptr #define YYLINECTPTR parser->linectptr #define YYLINE parser->linect #define YYFILL(n) syck_parser_read(parser) /* * Repositions the cursor at `n' offset from the token start. * Only works in `Header' and `Document' sections. */ #define YYPOS(n) YYCURSOR = YYTOKEN + n /* * Track line numbers */ #define NEWLINE(ptr) YYLINEPTR = ptr + newline_len(ptr); if ( YYLINEPTR > YYLINECTPTR ) { YYLINE++; YYLINECTPTR = YYLINEPTR; } /* * I like seeing the level operations as macros... */ #define ADD_LEVEL(len, status) syck_parser_add_level( parser, len, status ) #define POP_LEVEL() syck_parser_pop_level( parser ) #define CURRENT_LEVEL() syck_parser_current_level( parser ) /* * Force a token next time around sycklex() */ #define FORCE_NEXT_TOKEN(tok) parser->force_token = tok; /* * Nice little macro to ensure we're YAML_IOPENed to the current level. * * Only use this macro in the "Document" section * */ #define ENSURE_YAML_IOPEN(last_lvl, to_len, reset) \ if ( last_lvl->spaces < to_len ) \ { \ if ( last_lvl->status == syck_lvl_iseq || last_lvl->status == syck_lvl_imap ) \ { \ goto Document; \ } \ else \ { \ ADD_LEVEL( to_len, syck_lvl_doc ); \ if ( reset == 1 ) YYPOS(0); \ return YAML_IOPEN; \ } \ } /* * Nice little macro to ensure closure of levels. 
* * Only use this macro in the "Document" section * */ #define ENSURE_YAML_IEND(last_lvl, to_len) \ if ( last_lvl->spaces > to_len ) \ { \ syck_parser_pop_level( parser ); \ YYPOS(0); \ return YAML_IEND; \ } /* * Concatenates quoted string items and manages allocation * to the quoted string */ #define QUOTECAT(s, c, i, l) \ { \ if ( i + 1 >= c ) \ { \ c += QUOTELEN; \ S_REALLOC_N( s, char, c ); \ } \ s[i++] = l; \ s[i] = '\0'; \ } #define QUOTECATS(s, c, i, cs, cl) \ { \ while ( i + cl >= c ) \ { \ c += QUOTELEN; \ S_REALLOC_N( s, char, c ); \ } \ S_MEMCPY( s + i, cs, char, cl ); \ i += cl; \ s[i] = '\0'; \ } /* * Tags a plain scalar with a transfer method * * Use only in "Plain" section * */ #define RETURN_IMPLICIT() \ { \ SyckNode *n = syck_alloc_str(); \ YYCURSOR = YYTOKEN; \ n->data.str->ptr = qstr; \ n->data.str->len = qidx; \ n->data.str->style = scalar_plain; \ sycklval->nodeData = n; \ if ( parser->implicit_typing == 1 ) \ { \ try_tag_implicit( sycklval->nodeData, parser->taguri_expansion ); \ } \ return YAML_PLAIN; \ } /* concat the inline characters to the plain scalar */ #define PLAIN_NOT_INL() \ if ( *(YYCURSOR - 1) == ' ' || is_newline( YYCURSOR - 1 ) ) \ { \ YYCURSOR--; \ } \ QUOTECATS(qstr, qcapa, qidx, YYTOKEN, YYCURSOR - YYTOKEN); \ goto Plain2; /* trim spaces off the end in case of indent */ #define PLAIN_IS_INL() \ char *walker = qstr + qidx - 1; \ while ( walker > qstr && ( *walker == '\n' || *walker == ' ' ) ) \ { \ qidx--; \ walker[0] = '\0'; \ walker--; \ } /* * Keep or chomp block? * * Use only in "ScalarBlock" section * */ #define RETURN_YAML_BLOCK() \ { \ SyckNode *n = syck_alloc_str(); \ if ( ((SyckParser *)parser)->taguri_expansion == 1 ) \ { \ n->type_id = syck_taguri( YAML_DOMAIN, "str", 3 ); \ } \ else \ { \ n->type_id = syck_strndup( "str", 3 ); \ } \ n->data.str->ptr = qstr; \ n->data.str->len = qidx; \ if ( blockType == BLOCK_LIT ) { \ n->data.str->style = scalar_literal; \ } else { \ n->data.str->style = scalar_fold; \ } \ if ( qidx > 0 ) \ { \ if ( nlDoWhat != NL_KEEP ) \ { \ char *fc = n->data.str->ptr + n->data.str->len - 1; \ while ( is_newline( fc ) ) fc--; \ if ( nlDoWhat != NL_CHOMP && fc < n->data.str->ptr + n->data.str->len - 1 ) \ fc += 1; \ n->data.str->len = fc - n->data.str->ptr + 1; \ } \ } \ sycklval->nodeData = n; \ return YAML_BLOCK; \ } /* * Handles newlines, calculates indent */ #define GOBBLE_UP_YAML_INDENT( ict, start ) \ char *indent = start; \ NEWLINE(indent); \ while ( indent < YYCURSOR ) \ { \ if ( is_newline( ++indent ) ) \ { \ NEWLINE(indent); \ } \ } \ ict = 0; \ if ( *YYCURSOR == '\0' ) \ { \ ict = -1; \ start = YYCURSOR - 1; \ } \ else if ( *YYLINEPTR == ' ' ) \ { \ ict = YYCURSOR - YYLINEPTR; \ } /* * If an indent exists at the current level, back up. */ #define GET_TRUE_YAML_INDENT(indt_len) \ { \ SyckLevel *lvl_deep = CURRENT_LEVEL(); \ indt_len = lvl_deep->spaces; \ if ( indt_len == YYTOKEN - YYLINEPTR ) \ { \ SyckLevel *lvl_over; \ parser->lvl_idx--; \ lvl_over = CURRENT_LEVEL(); \ indt_len = lvl_over->spaces; \ parser->lvl_idx++; \ } \ } /* * Argjh! I hate globals! Here for syckerror() only! */ SyckParser *syck_parser_ptr = NULL; /* * Accessory funcs later in this file. */ void eat_comments( SyckParser * ); char escape_seq( char ); int is_newline( char *ptr ); int newline_len( char *ptr ); int sycklex_yaml_utf8( YYSTYPE *, SyckParser * ); int sycklex_bytecode_utf8( YYSTYPE *, SyckParser * ); int syckwrap(); /* * My own re-entrant sycklex() using re2c. * You really get used to the limited regexp. 
* It's really nice to not rely on backtracking and such. */ int sycklex( YYSTYPE *sycklval, SyckParser *parser ) { switch ( parser->input_type ) { case syck_yaml_utf8: return sycklex_yaml_utf8( sycklval, parser ); case syck_yaml_utf16: syckerror( "UTF-16 is not currently supported in Syck.\nPlease contribute code to help this happen!" ); break; case syck_yaml_utf32: syckerror( "UTF-32 is not currently supported in Syck.\nPlease contribute code to help this happen!" ); break; case syck_bytecode_utf8: return sycklex_bytecode_utf8( sycklval, parser ); } } /* * Parser for standard YAML [UTF-8] */ int sycklex_yaml_utf8( YYSTYPE *sycklval, SyckParser *parser ) { int doc_level = 0; syck_parser_ptr = parser; if ( YYCURSOR == NULL ) { syck_parser_read( parser ); } if ( parser->force_token != 0 ) { int t = parser->force_token; parser->force_token = 0; return t; } #line 312 "token.re" if ( YYLINEPTR != YYCURSOR ) { goto Document; } Header: YYTOKEN = YYCURSOR; #line 7 "" { YYCTYPE yych; unsigned int yyaccept; goto yy0; yy1: ++YYCURSOR; yy0: if((YYLIMIT - YYCURSOR) < 5) YYFILL(5); yych = *YYCURSOR; switch(yych){ case '\000': goto yy7; case '\n': goto yy9; case '\r': goto yy11; case ' ': goto yy12; case '#': goto yy5; case '-': goto yy2; case '.': goto yy4; default: goto yy14; } yy2: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case '-': goto yy28; default: goto yy3; } yy3: #line 371 "token.re" { YYPOS(0); goto Document; } #line 37 "" yy4: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case '.': goto yy21; default: goto yy3; } yy5: ++YYCURSOR; goto yy6; yy6: #line 353 "token.re" { eat_comments( parser ); goto Header; } #line 51 "" yy7: ++YYCURSOR; goto yy8; yy8: #line 357 "token.re" { SyckLevel *lvl = CURRENT_LEVEL(); ENSURE_YAML_IEND(lvl, -1); YYPOS(0); return 0; } #line 61 "" yy9: yyaccept = 1; yych = *(YYMARKER = ++YYCURSOR); goto yy18; yy10: #line 363 "token.re" { GOBBLE_UP_YAML_INDENT( doc_level, YYTOKEN ); goto Header; } #line 70 "" yy11: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy17; default: goto yy3; } yy12: ++YYCURSOR; yych = *YYCURSOR; goto yy16; yy13: #line 367 "token.re" { doc_level = YYCURSOR - YYLINEPTR; goto Header; } #line 83 "" yy14: yych = *++YYCURSOR; goto yy3; yy15: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy16; yy16: switch(yych){ case ' ': goto yy15; default: goto yy13; } yy17: yyaccept = 1; YYMARKER = ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy18; yy18: switch(yych){ case '\n': case ' ': goto yy17; case '\r': goto yy19; default: goto yy10; } yy19: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; switch(yych){ case '\n': goto yy17; default: goto yy20; } yy20: YYCURSOR = YYMARKER; switch(yyaccept){ case 1: goto yy10; case 0: goto yy3; } yy21: yych = *++YYCURSOR; switch(yych){ case '.': goto yy22; default: goto yy20; } yy22: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy23; case '\r': goto yy27; case ' ': goto yy25; default: goto yy20; } yy23: ++YYCURSOR; goto yy24; yy24: #line 339 "token.re" { SyckLevel *lvl = CURRENT_LEVEL(); if ( lvl->status == syck_lvl_header ) { goto Header; } else { ENSURE_YAML_IEND(lvl, -1); YYPOS(0); return 0; } return 0; } #line 147 "" yy25: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy26; yy26: switch(yych){ case ' ': goto yy25; default: goto yy24; } yy27: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy23; default: goto yy20; } yy28: yych = *++YYCURSOR; switch(yych){ case '-': goto yy29; default: goto yy20; 
} yy29: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy30; case '\r': goto yy34; case ' ': goto yy32; default: goto yy20; } yy30: ++YYCURSOR; goto yy31; yy31: #line 325 "token.re" { SyckLevel *lvl = CURRENT_LEVEL(); if ( lvl->status == syck_lvl_header ) { YYPOS(3); goto Directive; } else { ENSURE_YAML_IEND(lvl, -1); YYPOS(0); return 0; } } #line 191 "" yy32: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy33; yy33: switch(yych){ case ' ': goto yy32; default: goto yy31; } yy34: ++YYCURSOR; switch((yych = *YYCURSOR)) { case '\n': goto yy30; default: goto yy20; } } #line 375 "token.re" Document: { SyckLevel *lvl = CURRENT_LEVEL(); if ( lvl->status == syck_lvl_header ) { lvl->status = syck_lvl_doc; } YYTOKEN = YYCURSOR; #line 209 "" { YYCTYPE yych; unsigned int yyaccept; goto yy35; yy36: ++YYCURSOR; yy35: if((YYLIMIT - YYCURSOR) < 3) YYFILL(3); yych = *YYCURSOR; switch(yych){ case '\000': goto yy62; case '\n': goto yy37; case '\r': goto yy39; case ' ': goto yy60; case '!': goto yy51; case '"': goto yy55; case '#': goto yy58; case '&': goto yy49; case '\'': goto yy53; case '*': goto yy50; case ',': case ':': goto yy47; case '-': case '?': goto yy48; case '>': case '|': goto yy57; case '[': goto yy41; case ']': case '}': goto yy45; case '{': goto yy43; default: goto yy64; } yy37: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); goto yy92; yy38: #line 389 "token.re" { /* Isolate spaces */ int indt_len; GOBBLE_UP_YAML_INDENT( indt_len, YYTOKEN ); lvl = CURRENT_LEVEL(); doc_level = 0; /* XXX: Comment lookahead */ if ( *YYCURSOR == '#' ) { goto Document; } /* Ignore indentation inside inlines */ if ( lvl->status == syck_lvl_iseq || lvl->status == syck_lvl_imap ) { goto Document; } /* Check for open indent */ ENSURE_YAML_IEND(lvl, indt_len); ENSURE_YAML_IOPEN(lvl, indt_len, 0); if ( indt_len == -1 ) { return 0; } return YAML_INDENT; } #line 269 "" yy39: ++YYCURSOR; switch((yych = *YYCURSOR)) { case '\n': goto yy91; default: goto yy40; } yy40: #line 493 "token.re" { ENSURE_YAML_IOPEN(lvl, doc_level, 1); goto Plain; } #line 280 "" yy41: ++YYCURSOR; goto yy42; yy42: #line 417 "token.re" { ENSURE_YAML_IOPEN(lvl, doc_level, 1); lvl = CURRENT_LEVEL(); ADD_LEVEL(lvl->spaces + 1, syck_lvl_iseq); return YYTOKEN[0]; } #line 290 "" yy43: ++YYCURSOR; goto yy44; yy44: #line 423 "token.re" { ENSURE_YAML_IOPEN(lvl, doc_level, 1); lvl = CURRENT_LEVEL(); ADD_LEVEL(lvl->spaces + 1, syck_lvl_imap); return YYTOKEN[0]; } #line 300 "" yy45: ++YYCURSOR; goto yy46; yy46: #line 429 "token.re" { POP_LEVEL(); return YYTOKEN[0]; } #line 308 "" yy47: yyaccept = 1; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case '\n': goto yy86; case '\r': goto yy90; case ' ': goto yy88; default: goto yy40; } yy48: yyaccept = 1; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case '\n': goto yy81; case '\r': goto yy85; case ' ': goto yy83; default: goto yy40; } yy49: yych = *++YYCURSOR; switch(yych){ case '-': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 
'y': case 'z': goto yy78; default: goto yy40; } yy50: yych = *++YYCURSOR; switch(yych){ case '-': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy75; default: goto yy40; } yy51: ++YYCURSOR; goto yy52; yy52: #line 467 "token.re" { goto TransferMethod; } #line 458 "" yy53: ++YYCURSOR; goto yy54; yy54: #line 469 "token.re" { ENSURE_YAML_IOPEN(lvl, doc_level, 1); goto SingleQuote; } #line 465 "" yy55: ++YYCURSOR; goto yy56; yy56: #line 472 "token.re" { ENSURE_YAML_IOPEN(lvl, doc_level, 1); goto DoubleQuote; } #line 472 "" yy57: yyaccept = 1; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case '\n': goto yy70; case '\r': goto yy74; case ' ': goto yy72; case '+': case '-': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy67; default: goto yy40; } yy58: ++YYCURSOR; goto yy59; yy59: #line 482 "token.re" { eat_comments( parser ); goto Document; } #line 498 "" yy60: ++YYCURSOR; yych = *YYCURSOR; goto yy66; yy61: #line 486 "token.re" { goto Document; } #line 504 "" yy62: ++YYCURSOR; goto yy63; yy63: #line 488 "token.re" { ENSURE_YAML_IEND(lvl, -1); YYPOS(0); return 0; } #line 513 "" yy64: yych = *++YYCURSOR; goto yy40; yy65: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy66; yy66: switch(yych){ case ' ': goto yy65; default: goto yy61; } yy67: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy68; yy68: switch(yych){ case '\n': goto yy70; case '\r': goto yy74; case ' ': goto yy72; case '+': case '-': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy67; default: goto yy69; } yy69: YYCURSOR = YYMARKER; switch(yyaccept){ case 0: goto yy38; case 1: goto yy40; } yy70: ++YYCURSOR; goto yy71; yy71: #line 475 "token.re" { if ( is_newline( YYCURSOR - 1 ) ) { YYCURSOR--; } goto ScalarBlock; } #line 561 "" yy72: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy73; yy73: switch(yych){ case ' ': goto yy72; default: goto yy71; } yy74: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy70; default: goto yy69; } yy75: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy76; yy76: switch(yych){ case '-': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy75; default: goto yy77; } yy77: #line 462 "token.re" { ENSURE_YAML_IOPEN(lvl, doc_level, 1); sycklval->name = 
syck_strndup( YYTOKEN + 1, YYCURSOR - YYTOKEN - 1 ); return YAML_ALIAS; } #line 650 "" yy78: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy79; yy79: switch(yych){ case '-': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy78; default: goto yy80; } yy80: #line 451 "token.re" { sycklval->name = syck_strndup( YYTOKEN + 1, YYCURSOR - YYTOKEN - 1 ); /* * Remove previous anchors of the same name. Since the parser will likely * construct deeper nodes first, we want those nodes to be placed in the * queue for matching at a higher level of indentation. */ syck_hdlr_remove_anchor(parser, sycklval->name); return YAML_ANCHOR; } #line 731 "" yy81: ++YYCURSOR; goto yy82; yy82: #line 437 "token.re" { ENSURE_YAML_IOPEN(lvl, YYTOKEN - YYLINEPTR, 1); FORCE_NEXT_TOKEN(YAML_IOPEN); if ( *YYCURSOR == '#' || is_newline( YYCURSOR ) || is_newline( YYCURSOR - 1 ) ) { YYCURSOR--; ADD_LEVEL((YYTOKEN + 1) - YYLINEPTR, syck_lvl_doc); } else /* spaces followed by content uses the space as indentation */ { ADD_LEVEL(YYCURSOR - YYLINEPTR, syck_lvl_doc); } return YYTOKEN[0]; } #line 749 "" yy83: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy84; yy84: switch(yych){ case ' ': goto yy83; default: goto yy82; } yy85: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy81; default: goto yy69; } yy86: ++YYCURSOR; goto yy87; yy87: #line 433 "token.re" { YYPOS(1); return YYTOKEN[0]; } #line 771 "" yy88: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy89; yy89: switch(yych){ case ' ': goto yy88; default: goto yy87; } yy90: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy86; default: goto yy69; } yy91: yyaccept = 0; YYMARKER = ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy92; yy92: switch(yych){ case '\n': case ' ': goto yy91; case '\r': goto yy93; default: goto yy38; } yy93: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; switch(yych){ case '\n': goto yy91; default: goto yy69; } } #line 497 "token.re" } Directive: { YYTOKTMP = YYCURSOR; #line 807 "" { YYCTYPE yych; unsigned int yyaccept; goto yy94; yy95: ++YYCURSOR; yy94: if((YYLIMIT - YYCURSOR) < 2) YYFILL(2); yych = *YYCURSOR; switch(yych){ case '\000': goto yy96; case ' ': goto yy99; case '%': goto yy97; default: goto yy101; } yy96: YYCURSOR = YYMARKER; switch(yyaccept){ case 0: goto yy98; } yy97: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case '.': case '/': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case ':': case ';': case '<': case '=': case '>': case '?': case '@': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '[': case '\\': case ']': case '^': case '_': case 'a': case 'b': case 'c': 
case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy104; default: goto yy98; } yy98: #line 510 "token.re" { YYCURSOR = YYTOKTMP; return YAML_DOCSEP; } #line 911 "" yy99: ++YYCURSOR; yych = *YYCURSOR; goto yy103; yy100: #line 508 "token.re" { goto Directive; } #line 917 "" yy101: yych = *++YYCURSOR; goto yy98; yy102: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy103; yy103: switch(yych){ case ' ': goto yy102; default: goto yy100; } yy104: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy105; yy105: switch(yych){ case '.': case '/': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case ';': case '<': case '=': case '>': case '?': case '@': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '[': case '\\': case ']': case '^': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy104; case ':': goto yy106; default: goto yy96; } yy106: yych = *++YYCURSOR; switch(yych){ case '.': case '/': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case ':': case ';': case '<': case '=': case '>': case '?': case '@': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '[': case '\\': case ']': case '^': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy107; default: goto yy96; } yy107: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy108; yy108: switch(yych){ case '.': case '/': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case ':': case ';': case '<': case '=': case '>': case '?': case '@': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '[': case '\\': case ']': case '^': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy107; default: goto yy109; } yy109: #line 506 "token.re" { goto Directive; } #line 1176 "" } #line 513 "token.re" } Plain: { int qidx = 0; int qcapa = 100; char *qstr = S_ALLOC_N( char, qcapa ); SyckLevel *plvl; int parentIndent; YYCURSOR = YYTOKEN; plvl = CURRENT_LEVEL(); 
GET_TRUE_YAML_INDENT(parentIndent); Plain2: YYTOKEN = YYCURSOR; Plain3: #line 1180 "" { YYCTYPE yych; unsigned int yyaccept; goto yy110; yy111: ++YYCURSOR; yy110: if((YYLIMIT - YYCURSOR) < 3) YYFILL(3); yych = *YYCURSOR; switch(yych){ case '\000': goto yy124; case '\n': goto yy112; case '\r': goto yy114; case ' ': goto yy122; case ',': goto yy117; case ':': goto yy116; case ']': goto yy120; case '}': goto yy118; default: goto yy126; } yy112: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); goto yy141; yy113: #line 536 "token.re" { int indt_len, nl_count = 0; SyckLevel *lvl; char *tok = YYTOKEN; GOBBLE_UP_YAML_INDENT( indt_len, tok ); lvl = CURRENT_LEVEL(); if ( indt_len <= parentIndent ) { RETURN_IMPLICIT(); } while ( YYTOKEN < YYCURSOR ) { int nl_len = 0; if ( nl_len = newline_len( YYTOKEN++ ) ) { nl_count++; YYTOKEN += nl_len - 1; } } if ( nl_count <= 1 ) { QUOTECAT(qstr, qcapa, qidx, ' '); } else { int i; for ( i = 0; i < nl_count - 1; i++ ) { QUOTECAT(qstr, qcapa, qidx, '\n'); } } goto Plain2; } #line 1240 "" yy114: ++YYCURSOR; switch((yych = *YYCURSOR)) { case '\n': goto yy140; default: goto yy115; } yy115: #line 615 "token.re" { QUOTECATS(qstr, qcapa, qidx, YYTOKEN, YYCURSOR - YYTOKEN); goto Plain2; } #line 1251 "" yy116: yyaccept = 1; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case '\n': goto yy135; case '\r': goto yy139; case ' ': goto yy137; default: goto yy115; } yy117: yyaccept = 1; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case '\n': goto yy129; case '\r': goto yy133; case ' ': goto yy131; default: goto yy115; } yy118: ++YYCURSOR; goto yy119; yy119: #line 585 "token.re" { if ( plvl->status != syck_lvl_imap ) { PLAIN_NOT_INL(); } else { PLAIN_IS_INL(); } RETURN_IMPLICIT(); } #line 1282 "" yy120: ++YYCURSOR; goto yy121; yy121: #line 596 "token.re" { if ( plvl->status != syck_lvl_iseq ) { PLAIN_NOT_INL(); } else { PLAIN_IS_INL(); } RETURN_IMPLICIT(); } #line 1297 "" yy122: ++YYCURSOR; switch((yych = *YYCURSOR)) { case '#': goto yy127; default: goto yy123; } yy123: #line 613 "token.re" { goto Plain3; } #line 1306 "" yy124: ++YYCURSOR; goto yy125; yy125: #line 611 "token.re" { RETURN_IMPLICIT(); } #line 1312 "" yy126: yych = *++YYCURSOR; goto yy115; yy127: ++YYCURSOR; goto yy128; yy128: #line 607 "token.re" { eat_comments( parser ); RETURN_IMPLICIT(); } #line 1322 "" yy129: ++YYCURSOR; goto yy130; yy130: #line 574 "token.re" { if ( plvl->status != syck_lvl_iseq && plvl->status != syck_lvl_imap ) { PLAIN_NOT_INL(); } else { PLAIN_IS_INL(); } RETURN_IMPLICIT(); } #line 1337 "" yy131: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy132; yy132: switch(yych){ case ' ': goto yy131; default: goto yy130; } yy133: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy129; default: goto yy134; } yy134: YYCURSOR = YYMARKER; switch(yyaccept){ case 0: goto yy113; case 1: goto yy115; } yy135: ++YYCURSOR; goto yy136; yy136: #line 572 "token.re" { RETURN_IMPLICIT(); } #line 1362 "" yy137: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy138; yy138: switch(yych){ case ' ': goto yy137; default: goto yy136; } yy139: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy135; default: goto yy134; } yy140: yyaccept = 0; YYMARKER = ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy141; yy141: switch(yych){ case '\n': case ' ': goto yy140; case '\r': goto yy142; default: goto yy113; } yy142: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; switch(yych){ case '\n': goto yy140; default: goto yy134; } } #line 619 
"token.re" } SingleQuote: { int qidx = 0; int qcapa = 100; char *qstr = S_ALLOC_N( char, qcapa ); SingleQuote2: YYTOKEN = YYCURSOR; #line 1398 "" { YYCTYPE yych; unsigned int yyaccept; goto yy143; yy144: ++YYCURSOR; yy143: if((YYLIMIT - YYCURSOR) < 2) YYFILL(2); yych = *YYCURSOR; switch(yych){ case '\000': goto yy151; case '\n': goto yy145; case '\r': goto yy147; case '\'': goto yy149; default: goto yy152; } yy145: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); goto yy156; yy146: #line 633 "token.re" { int indt_len; int nl_count = 0; SyckLevel *lvl; GOBBLE_UP_YAML_INDENT( indt_len, YYTOKEN ); lvl = CURRENT_LEVEL(); if ( lvl->status != syck_lvl_str ) { ADD_LEVEL( indt_len, syck_lvl_str ); } else if ( indt_len < lvl->spaces ) { /* Error! */ } while ( YYTOKEN < YYCURSOR ) { int nl_len = 0; if ( nl_len = newline_len( YYTOKEN++ ) ) { nl_count++; YYTOKEN += nl_len - 1; } } if ( nl_count <= 1 ) { QUOTECAT(qstr, qcapa, qidx, ' '); } else { int i; for ( i = 0; i < nl_count - 1; i++ ) { QUOTECAT(qstr, qcapa, qidx, '\n'); } } goto SingleQuote2; } #line 1458 "" yy147: ++YYCURSOR; switch((yych = *YYCURSOR)) { case '\n': goto yy155; default: goto yy148; } yy148: #line 700 "token.re" { QUOTECAT(qstr, qcapa, qidx, *(YYCURSOR - 1)); goto SingleQuote2; } #line 1469 "" yy149: ++YYCURSOR; switch((yych = *YYCURSOR)) { case '\'': goto yy153; default: goto yy150; } yy150: #line 677 "token.re" { SyckLevel *lvl; SyckNode *n = syck_alloc_str(); lvl = CURRENT_LEVEL(); if ( lvl->status == syck_lvl_str ) { POP_LEVEL(); } if ( ((SyckParser *)parser)->taguri_expansion == 1 ) { n->type_id = syck_taguri( YAML_DOMAIN, "str", 3 ); } else { n->type_id = syck_strndup( "str", 3 ); } n->data.str->ptr = qstr; n->data.str->len = qidx; n->data.str->style = scalar_1quote; sycklval->nodeData = n; return YAML_PLAIN; } #line 1499 "" yy151: yych = *++YYCURSOR; goto yy150; yy152: yych = *++YYCURSOR; goto yy148; yy153: ++YYCURSOR; goto yy154; yy154: #line 673 "token.re" { QUOTECAT(qstr, qcapa, qidx, '\''); goto SingleQuote2; } #line 1511 "" yy155: yyaccept = 0; YYMARKER = ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy156; yy156: switch(yych){ case '\n': case ' ': goto yy155; case '\r': goto yy157; default: goto yy146; } yy157: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; switch(yych){ case '\n': goto yy155; default: goto yy158; } yy158: YYCURSOR = YYMARKER; switch(yyaccept){ case 0: goto yy146; } } #line 704 "token.re" } DoubleQuote: { int keep_nl = 1; int qidx = 0; int qcapa = 100; char *qstr = S_ALLOC_N( char, qcapa ); DoubleQuote2: YYTOKEN = YYCURSOR; #line 1537 "" { YYCTYPE yych; unsigned int yyaccept; goto yy159; yy160: ++YYCURSOR; yy159: if((YYLIMIT - YYCURSOR) < 4) YYFILL(4); yych = *YYCURSOR; switch(yych){ case '\000': goto yy166; case '\n': goto yy161; case '\r': goto yy163; case '"': goto yy168; case '\\': goto yy165; default: goto yy169; } yy161: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); goto yy183; yy162: #line 722 "token.re" { int indt_len; int nl_count = 0; SyckLevel *lvl; GOBBLE_UP_YAML_INDENT( indt_len, YYTOKEN ); lvl = CURRENT_LEVEL(); if ( lvl->status != syck_lvl_str ) { ADD_LEVEL( indt_len, syck_lvl_str ); } else if ( indt_len < lvl->spaces ) { /* FIXME */ } if ( keep_nl == 1 ) { while ( YYTOKEN < YYCURSOR ) { int nl_len = 0; if ( nl_len = newline_len( YYTOKEN++ ) ) { nl_count++; YYTOKEN += nl_len - 1; } } if ( nl_count <= 1 ) { QUOTECAT(qstr, qcapa, qidx, ' '); } else { int i; for ( i = 0; i < nl_count - 1; i++ ) { QUOTECAT(qstr, qcapa, qidx, '\n'); } } } keep_nl = 1; 
goto DoubleQuote2; } #line 1602 "" yy163: ++YYCURSOR; switch((yych = *YYCURSOR)) { case '\n': goto yy182; default: goto yy164; } yy164: #line 808 "token.re" { QUOTECAT(qstr, qcapa, qidx, *(YYCURSOR - 1)); goto DoubleQuote2; } #line 1613 "" yy165: yyaccept = 1; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case '\n': goto yy173; case '\r': goto yy175; case ' ': goto yy170; case '"': case '0': case '\\': case 'a': case 'b': case 'e': case 'f': case 'n': case 'r': case 't': case 'v': goto yy177; case 'x': goto yy176; default: goto yy164; } yy166: ++YYCURSOR; goto yy167; yy167: #line 785 "token.re" { SyckLevel *lvl; SyckNode *n = syck_alloc_str(); lvl = CURRENT_LEVEL(); if ( lvl->status == syck_lvl_str ) { POP_LEVEL(); } if ( ((SyckParser *)parser)->taguri_expansion == 1 ) { n->type_id = syck_taguri( YAML_DOMAIN, "str", 3 ); } else { n->type_id = syck_strndup( "str", 3 ); } n->data.str->ptr = qstr; n->data.str->len = qidx; n->data.str->style = scalar_2quote; sycklval->nodeData = n; return YAML_PLAIN; } #line 1652 "" yy168: yych = *++YYCURSOR; goto yy167; yy169: yych = *++YYCURSOR; goto yy164; yy170: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy171; yy171: switch(yych){ case '\n': goto yy173; case '\r': goto yy175; case ' ': goto yy170; default: goto yy172; } yy172: YYCURSOR = YYMARKER; switch(yyaccept){ case 0: goto yy162; case 1: goto yy164; } yy173: ++YYCURSOR; goto yy174; yy174: #line 780 "token.re" { keep_nl = 0; YYCURSOR--; goto DoubleQuote2; } #line 1681 "" yy175: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy173; default: goto yy172; } yy176: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': goto yy179; default: goto yy172; } yy177: ++YYCURSOR; goto yy178; yy178: #line 766 "token.re" { char ch = *( YYCURSOR - 1 ); QUOTECAT(qstr, qcapa, qidx, escape_seq( ch )); goto DoubleQuote2; } #line 1719 "" yy179: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': goto yy180; default: goto yy172; } yy180: ++YYCURSOR; goto yy181; yy181: #line 771 "token.re" { long ch; char *chr_text = syck_strndup( YYTOKEN, 4 ); chr_text[0] = '0'; ch = strtol( chr_text, NULL, 16 ); free( chr_text ); QUOTECAT(qstr, qcapa, qidx, ch); goto DoubleQuote2; } #line 1756 "" yy182: yyaccept = 0; YYMARKER = ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy183; yy183: switch(yych){ case '\n': case ' ': goto yy182; case '\r': goto yy184; default: goto yy162; } yy184: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; switch(yych){ case '\n': goto yy182; default: goto yy172; } } #line 812 "token.re" } TransferMethod: { int qidx = 0; int qcapa = 100; char *qstr = S_ALLOC_N( char, qcapa ); TransferMethod2: YYTOKTMP = YYCURSOR; #line 1778 "" { YYCTYPE yych; unsigned int yyaccept; goto yy185; yy186: ++YYCURSOR; yy185: if((YYLIMIT - YYCURSOR) < 4) YYFILL(4); yych = *YYCURSOR; switch(yych){ case '\000': goto yy187; case '\n': goto yy189; case '\r': goto yy191; case ' ': goto yy190; case '\\': goto yy193; default: goto yy194; } yy187: ++YYCURSOR; goto yy188; yy188: #line 826 "token.re" { SyckLevel *lvl; YYCURSOR = YYTOKTMP; if ( YYCURSOR == YYTOKEN + 1 ) { free( qstr 
); return YAML_ITRANSFER; } lvl = CURRENT_LEVEL(); /* * URL Prefixing */ if ( *qstr == '^' ) { sycklval->name = S_ALLOC_N( char, qidx + strlen( lvl->domain ) ); sycklval->name[0] = '\0'; strcat( sycklval->name, lvl->domain ); strncat( sycklval->name, qstr + 1, qidx - 1 ); free( qstr ); } else { char *carat = qstr; char *qend = qstr + qidx; while ( (++carat) < qend ) { if ( *carat == '^' ) break; } if ( carat < qend ) { free( lvl->domain ); lvl->domain = syck_strndup( qstr, carat - qstr ); sycklval->name = S_ALLOC_N( char, ( qend - carat ) + strlen( lvl->domain ) ); sycklval->name[0] = '\0'; strcat( sycklval->name, lvl->domain ); strncat( sycklval->name, carat + 1, ( qend - carat ) - 1 ); free( qstr ); } else { sycklval->name = qstr; } } return YAML_TRANSFER; } #line 1848 "" yy189: yych = *++YYCURSOR; goto yy188; yy190: yych = *++YYCURSOR; goto yy203; yy191: ++YYCURSOR; switch((yych = *YYCURSOR)) { case '\n': goto yy187; default: goto yy192; } yy192: #line 893 "token.re" { QUOTECAT(qstr, qcapa, qidx, *(YYCURSOR - 1)); goto TransferMethod2; } #line 1863 "" yy193: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case '"': case '0': case '\\': case 'a': case 'b': case 'e': case 'f': case 'n': case 'r': case 't': case 'v': goto yy197; case 'x': goto yy195; default: goto yy192; } yy194: yych = *++YYCURSOR; goto yy192; yy195: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': goto yy199; default: goto yy196; } yy196: YYCURSOR = YYMARKER; switch(yyaccept){ case 0: goto yy192; } yy197: ++YYCURSOR; goto yy198; yy198: #line 879 "token.re" { char ch = *( YYCURSOR - 1 ); QUOTECAT(qstr, qcapa, qidx, escape_seq( ch )); goto TransferMethod2; } #line 1911 "" yy199: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': goto yy200; default: goto yy196; } yy200: ++YYCURSOR; goto yy201; yy201: #line 884 "token.re" { long ch; char *chr_text = syck_strndup( YYTOKTMP, 4 ); chr_text[0] = '0'; ch = strtol( chr_text, NULL, 16 ); free( chr_text ); QUOTECAT(qstr, qcapa, qidx, ch); goto TransferMethod2; } #line 1948 "" yy202: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy203; yy203: switch(yych){ case ' ': goto yy202; default: goto yy188; } } #line 898 "token.re" } ScalarBlock: { int qidx = 0; int qcapa = 100; char *qstr = S_ALLOC_N( char, qcapa ); int blockType = 0; int nlDoWhat = 0; int lastIndent = 0; int forceIndent = -1; char *yyt = YYTOKEN; SyckLevel *lvl = CURRENT_LEVEL(); int parentIndent; GET_TRUE_YAML_INDENT(parentIndent); switch ( *yyt ) { case '|': blockType = BLOCK_LIT; break; case '>': blockType = BLOCK_FOLD; break; } while ( ++yyt <= YYCURSOR ) { if ( *yyt == '-' ) { nlDoWhat = NL_CHOMP; } else if ( *yyt == '+' ) { nlDoWhat = NL_KEEP; } else if ( isdigit( *yyt ) ) { forceIndent = strtol( yyt, NULL, 10 ) + parentIndent; } } qstr[0] = '\0'; YYTOKEN = YYCURSOR; ScalarBlock2: YYTOKEN = YYCURSOR; #line 1961 "" { YYCTYPE yych; unsigned int yyaccept; goto yy204; yy205: ++YYCURSOR; yy204: if((YYLIMIT - YYCURSOR) < 5) YYFILL(5); yych = *YYCURSOR; switch(yych){ case '\000': goto yy212; case '\n': goto yy206; case '\r': goto yy208; case '#': goto yy210; case '-': goto yy214; 
default: goto yy215; } yy206: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); goto yy225; yy207: #line 945 "token.re" { char *pacer; char *tok = YYTOKEN; int indt_len = 0, nl_count = 0, fold_nl = 0, nl_begin = 0; GOBBLE_UP_YAML_INDENT( indt_len, tok ); lvl = CURRENT_LEVEL(); if ( indt_len > parentIndent && lvl->status != syck_lvl_block ) { int new_spaces = forceIndent > 0 ? forceIndent : indt_len; ADD_LEVEL( new_spaces, syck_lvl_block ); lastIndent = indt_len - new_spaces; nl_begin = 1; lvl = CURRENT_LEVEL(); } else if ( lvl->status != syck_lvl_block ) { YYCURSOR = YYTOKEN; RETURN_YAML_BLOCK(); } /* * Fold only in the event of two lines being on the leftmost * indentation. */ if ( blockType == BLOCK_FOLD && lastIndent == 0 && ( indt_len - lvl->spaces ) == 0 ) { fold_nl = 1; } pacer = YYTOKEN; while ( pacer < YYCURSOR ) { int nl_len = 0; if ( nl_len = newline_len( pacer++ ) ) { nl_count++; pacer += nl_len - 1; } } if ( fold_nl == 1 || nl_begin == 1 ) { nl_count--; } if ( nl_count < 1 && nl_begin == 0 ) { QUOTECAT(qstr, qcapa, qidx, ' '); } else { int i; for ( i = 0; i < nl_count; i++ ) { QUOTECAT(qstr, qcapa, qidx, '\n'); } } lastIndent = indt_len - lvl->spaces; YYCURSOR -= lastIndent; if ( indt_len < lvl->spaces ) { POP_LEVEL(); YYCURSOR = YYTOKEN; RETURN_YAML_BLOCK(); } goto ScalarBlock2; } #line 2052 "" yy208: ++YYCURSOR; switch((yych = *YYCURSOR)) { case '\n': goto yy224; default: goto yy209; } yy209: #line 1054 "token.re" { QUOTECAT(qstr, qcapa, qidx, *YYTOKEN); goto ScalarBlock2; } #line 2063 "" yy210: ++YYCURSOR; goto yy211; yy211: #line 1016 "token.re" { lvl = CURRENT_LEVEL(); if ( lvl->status != syck_lvl_block ) { eat_comments( parser ); YYTOKEN = YYCURSOR; } else { QUOTECAT(qstr, qcapa, qidx, *YYTOKEN); } goto ScalarBlock2; } #line 2080 "" yy212: ++YYCURSOR; goto yy213; yy213: #line 1030 "token.re" { YYCURSOR--; POP_LEVEL(); RETURN_YAML_BLOCK(); } #line 2089 "" yy214: yyaccept = 1; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case '-': goto yy216; default: goto yy209; } yy215: yych = *++YYCURSOR; goto yy209; yy216: yych = *++YYCURSOR; switch(yych){ case '-': goto yy218; default: goto yy217; } yy217: YYCURSOR = YYMARKER; switch(yyaccept){ case 0: goto yy207; case 1: goto yy209; } yy218: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy219; case '\r': goto yy223; case ' ': goto yy221; default: goto yy217; } yy219: ++YYCURSOR; goto yy220; yy220: #line 1035 "token.re" { if ( YYTOKEN == YYLINEPTR ) { if ( blockType == BLOCK_FOLD && qidx > 0 ) { qidx -= 1; } QUOTECAT(qstr, qcapa, qidx, '\n'); POP_LEVEL(); YYCURSOR = YYTOKEN; RETURN_YAML_BLOCK(); } else { QUOTECAT(qstr, qcapa, qidx, *YYTOKEN); YYCURSOR = YYTOKEN + 1; goto ScalarBlock2; } } #line 2137 "" yy221: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy222; yy222: switch(yych){ case ' ': goto yy221; default: goto yy220; } yy223: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy219; default: goto yy217; } yy224: yyaccept = 0; YYMARKER = ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy225; yy225: switch(yych){ case '\n': case ' ': goto yy224; case '\r': goto yy226; default: goto yy207; } yy226: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; switch(yych){ case '\n': goto yy224; default: goto yy217; } } #line 1059 "token.re" } return 0; } void eat_comments( SyckParser *parser ) { Comment: { YYTOKEN = YYCURSOR; #line 2173 "" { YYCTYPE yych; unsigned int yyaccept; goto yy227; yy228: ++YYCURSOR; yy227: if((YYLIMIT - YYCURSOR) < 2) YYFILL(2); yych = *YYCURSOR; 
switch(yych){ case '\000': goto yy229; case '\n': goto yy231; case '\r': goto yy232; default: goto yy234; } yy229: ++YYCURSOR; goto yy230; yy230: #line 1075 "token.re" { YYCURSOR = YYTOKEN; return; } #line 2195 "" yy231: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); goto yy236; yy232: ++YYCURSOR; switch((yych = *YYCURSOR)) { case '\n': goto yy235; default: goto yy233; } yy233: #line 1079 "token.re" { goto Comment; } #line 2208 "" yy234: yych = *++YYCURSOR; goto yy233; yy235: yyaccept = 0; YYMARKER = ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy236; yy236: switch(yych){ case '\n': goto yy235; case '\r': goto yy237; default: goto yy230; } yy237: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; switch(yych){ case '\n': goto yy235; default: goto yy238; } yy238: YYCURSOR = YYMARKER; switch(yyaccept){ case 0: goto yy230; } } #line 1082 "token.re" } } char escape_seq( char ch ) { switch ( ch ) { case '0': return '\0'; case 'a': return 7; case 'b': return '\010'; case 'e': return '\033'; case 'f': return '\014'; case 'n': return '\n'; case 'r': return '\015'; case 't': return '\t'; case 'v': return '\013'; default: return ch; } } int is_newline( char *ptr ) { return newline_len( ptr ); } int newline_len( char *ptr ) { if ( *ptr == '\n' ) return 1; if ( *ptr == '\r' && *( ptr + 1 ) == '\n' ) return 2; return 0; } int syckwrap() { return 1; } void syckerror( char *msg ) { if ( syck_parser_ptr->error_handler == NULL ) syck_parser_ptr->error_handler = syck_default_error_handler; syck_parser_ptr->root = syck_parser_ptr->root_on_error; (syck_parser_ptr->error_handler)(syck_parser_ptr, msg); } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/ext/syck/handler.c0000644000000000000000000000650311672453175025201 0ustar rootroot/* * handler.c * * $Author: why $ * $Date: 2005/01/08 21:44:00 $ * * Copyright (C) 2003 why the lucky stiff */ #include "syck.h" SYMID syck_hdlr_add_node( SyckParser *p, SyckNode *n ) { SYMID id; if ( ! n->id ) { n->id = (p->handler)( p, n ); } id = n->id; if ( n->anchor == NULL ) { syck_free_node( n ); } return id; } SyckNode * syck_hdlr_add_anchor( SyckParser *p, char *a, SyckNode *n ) { char *atmp = NULL; SyckNode *ntmp = NULL; n->anchor = a; if ( p->bad_anchors != NULL ) { SyckNode *bad; if ( st_lookup( p->bad_anchors, (st_data_t)a, (st_data_t *)&bad ) ) { if ( n->kind != syck_str_kind ) { n->id = bad->id; (p->handler)( p, n ); } } } if ( p->anchors == NULL ) { p->anchors = st_init_strtable(); } if ( st_lookup( p->anchors, (st_data_t)a, (st_data_t *)&ntmp ) ) { if ( ntmp != (void *)1 ) { syck_free_node( ntmp ); } } st_insert( p->anchors, (st_data_t)a, (st_data_t)n ); return n; } void syck_hdlr_remove_anchor( SyckParser *p, char *a ) { char *atmp = a; SyckNode *ntmp; if ( p->anchors == NULL ) { p->anchors = st_init_strtable(); } if ( st_delete( p->anchors, (st_data_t *)&atmp, (st_data_t *)&ntmp ) ) { if ( ntmp != (void *)1 ) { syck_free_node( ntmp ); } } st_insert( p->anchors, (st_data_t)a, (st_data_t)1 ); } SyckNode * syck_hdlr_get_anchor( SyckParser *p, char *a ) { SyckNode *n = NULL; if ( p->anchors != NULL ) { if ( st_lookup( p->anchors, (st_data_t)a, (st_data_t *)&n ) ) { if ( n != (void *)1 ) { S_FREE( a ); return n; } else { if ( p->bad_anchors == NULL ) { p->bad_anchors = st_init_strtable(); } if ( ! 
st_lookup( p->bad_anchors, (st_data_t)a, (st_data_t *)&n ) ) { n = (p->bad_anchor_handler)( p, a ); st_insert( p->bad_anchors, (st_data_t)a, (st_data_t)n ); } } } } if ( n == NULL ) { n = (p->bad_anchor_handler)( p, a ); } if ( n->anchor ) { S_FREE( a ); } else { n->anchor = a; } return n; } void syck_add_transfer( char *uri, SyckNode *n, int taguri ) { if ( n->type_id != NULL ) { S_FREE( n->type_id ); } if ( taguri == 0 ) { n->type_id = uri; return; } n->type_id = syck_type_id_to_uri( uri ); S_FREE( uri ); } char * syck_xprivate( char *type_id, int type_len ) { char *uri = S_ALLOC_N( char, type_len + 14 ); uri[0] = '\0'; strcat( uri, "x-private:" ); strncat( uri, type_id, type_len ); return uri; } char * syck_taguri( char *domain, char *type_id, int type_len ) { char *uri = S_ALLOC_N( char, strlen( domain ) + type_len + 14 ); uri[0] = '\0'; strcat( uri, "tag:" ); strcat( uri, domain ); strcat( uri, ":" ); strncat( uri, type_id, type_len ); return uri; } int syck_try_implicit( SyckNode *n ) { return 1; } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/ext/syck/gram.c0000644000000000000000000015656311672453175024526 0ustar rootroot/* A Bison parser, made by GNU Bison 1.875d. */ /* Skeleton parser for Yacc-like parsing with Bison, Copyright (C) 1984, 1989, 1990, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */ /* As a special exception, when this file is copied by Bison into a Bison output file, you may use that output file without restriction. This special exception was added by the Free Software Foundation in version 1.24 of Bison. */ /* Written by Richard Stallman by simplifying the original so called ``semantic'' parser. */ /* All symbols defined below should begin with yy or YY, to avoid infringing on user name space. This should be done even for local variables, as they might otherwise be expanded by user macros. There are some unavoidable exceptions within include files to define necessary library symbols; they are noted "INFRINGES ON USER NAME SPACE" below. */ /* Identify Bison output. */ #define YYBISON 1 /* Skeleton name. */ #define YYSKELETON_NAME "yacc.c" /* Pure parsers. */ #define YYPURE 1 /* Using locations. */ #define YYLSP_NEEDED 0 /* If NAME_PREFIX is specified substitute the variables and functions names. */ #define yyparse syckparse #define yylex sycklex #define yyerror syckerror #define yylval sycklval #define yychar syckchar #define yydebug syckdebug #define yynerrs sycknerrs /* Tokens. */ #ifndef YYTOKENTYPE # define YYTOKENTYPE /* Put the tokens into the symbol table, so that GDB and other debuggers know about them. 
*/ enum yytokentype { YAML_ANCHOR = 258, YAML_ALIAS = 259, YAML_TRANSFER = 260, YAML_TAGURI = 261, YAML_ITRANSFER = 262, YAML_WORD = 263, YAML_PLAIN = 264, YAML_BLOCK = 265, YAML_DOCSEP = 266, YAML_IOPEN = 267, YAML_INDENT = 268, YAML_IEND = 269 }; #endif #define YAML_ANCHOR 258 #define YAML_ALIAS 259 #define YAML_TRANSFER 260 #define YAML_TAGURI 261 #define YAML_ITRANSFER 262 #define YAML_WORD 263 #define YAML_PLAIN 264 #define YAML_BLOCK 265 #define YAML_DOCSEP 266 #define YAML_IOPEN 267 #define YAML_INDENT 268 #define YAML_IEND 269 /* Copy the first part of user declarations. */ #line 14 "gram.y" #include "syck.h" void apply_seq_in_map( SyckParser *parser, SyckNode *n ); #define YYPARSE_PARAM parser #define YYLEX_PARAM parser #define NULL_NODE(parser, node) \ SyckNode *node = syck_new_str( "", scalar_plain ); \ if ( ((SyckParser *)parser)->taguri_expansion == 1 ) \ { \ node->type_id = syck_taguri( YAML_DOMAIN, "null", 4 ); \ } \ else \ { \ node->type_id = syck_strndup( "null", 4 ); \ } /* Enabling traces. */ #ifndef YYDEBUG # define YYDEBUG 1 #endif /* Enabling verbose error messages. */ #ifdef YYERROR_VERBOSE # undef YYERROR_VERBOSE # define YYERROR_VERBOSE 1 #else # define YYERROR_VERBOSE 0 #endif #if ! defined (YYSTYPE) && ! defined (YYSTYPE_IS_DECLARED) #line 35 "gram.y" typedef union YYSTYPE { SYMID nodeId; SyckNode *nodeData; char *name; } YYSTYPE; /* Line 191 of yacc.c. */ #line 140 "gram.c" # define yystype YYSTYPE /* obsolescent; will be withdrawn */ # define YYSTYPE_IS_DECLARED 1 # define YYSTYPE_IS_TRIVIAL 1 #endif /* Copy the second part of user declarations. */ /* Line 214 of yacc.c. */ #line 152 "gram.c" #if ! defined (yyoverflow) || YYERROR_VERBOSE # ifndef YYFREE # define YYFREE free # endif # ifndef YYMALLOC # define YYMALLOC malloc # endif /* The parser invokes alloca or malloc; define the necessary symbols. */ # ifdef YYSTACK_USE_ALLOCA # if YYSTACK_USE_ALLOCA # define YYSTACK_ALLOC alloca # endif # else # if defined (alloca) || defined (_ALLOCA_H) # define YYSTACK_ALLOC alloca # else # ifdef __GNUC__ # define YYSTACK_ALLOC __builtin_alloca # endif # endif # endif # ifdef YYSTACK_ALLOC /* Pacify GCC's `empty if-body' warning. */ # define YYSTACK_FREE(Ptr) do { /* empty */; } while (0) # else # if defined (__STDC__) || defined (__cplusplus) # include /* INFRINGES ON USER NAME SPACE */ # define YYSIZE_T size_t # endif # define YYSTACK_ALLOC YYMALLOC # define YYSTACK_FREE YYFREE # endif #endif /* ! defined (yyoverflow) || YYERROR_VERBOSE */ #if (! defined (yyoverflow) \ && (! defined (__cplusplus) \ || (defined (YYSTYPE_IS_TRIVIAL) && YYSTYPE_IS_TRIVIAL))) /* A type that is properly aligned for any stack member. */ union yyalloc { short int yyss; YYSTYPE yyvs; }; /* The size of the maximum gap between one aligned stack and the next. */ # define YYSTACK_GAP_MAXIMUM (sizeof (union yyalloc) - 1) /* The size of an array large to enough to hold all stacks, each with N elements. */ # define YYSTACK_BYTES(N) \ ((N) * (sizeof (short int) + sizeof (YYSTYPE)) \ + YYSTACK_GAP_MAXIMUM) /* Copy COUNT objects from FROM to TO. The source and destination do not overlap. */ # ifndef YYCOPY # if defined (__GNUC__) && 1 < __GNUC__ # define YYCOPY(To, From, Count) \ __builtin_memcpy (To, From, (Count) * sizeof (*(From))) # else # define YYCOPY(To, From, Count) \ do \ { \ register YYSIZE_T yyi; \ for (yyi = 0; yyi < (Count); yyi++) \ (To)[yyi] = (From)[yyi]; \ } \ while (0) # endif # endif /* Relocate STACK from its old location to the new one. 
The local variables YYSIZE and YYSTACKSIZE give the old and new number of elements in the stack, and YYPTR gives the new location of the stack. Advance YYPTR to a properly aligned location for the next stack. */ # define YYSTACK_RELOCATE(Stack) \ do \ { \ YYSIZE_T yynewbytes; \ YYCOPY (&yyptr->Stack, Stack, yysize); \ Stack = &yyptr->Stack; \ yynewbytes = yystacksize * sizeof (*Stack) + YYSTACK_GAP_MAXIMUM; \ yyptr += yynewbytes / sizeof (*yyptr); \ } \ while (0) #endif #if defined (__STDC__) || defined (__cplusplus) typedef signed char yysigned_char; #else typedef short int yysigned_char; #endif /* YYFINAL -- State number of the termination state. */ #define YYFINAL 52 /* YYLAST -- Last index in YYTABLE. */ #define YYLAST 396 /* YYNTOKENS -- Number of terminals. */ #define YYNTOKENS 23 /* YYNNTS -- Number of nonterminals. */ #define YYNNTS 29 /* YYNRULES -- Number of rules. */ #define YYNRULES 79 /* YYNRULES -- Number of states. */ #define YYNSTATES 128 /* YYTRANSLATE(YYLEX) -- Bison symbol number corresponding to YYLEX. */ #define YYUNDEFTOK 2 #define YYMAXUTOK 269 #define YYTRANSLATE(YYX) \ ((unsigned int) (YYX) <= YYMAXUTOK ? yytranslate[YYX] : YYUNDEFTOK) /* YYTRANSLATE[YYLEX] -- Bison symbol number corresponding to YYLEX. */ static const unsigned char yytranslate[] = { 0, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 21, 15, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 16, 2, 2, 2, 2, 22, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 17, 2, 18, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 19, 2, 20, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14 }; #if YYDEBUG /* YYPRHS[YYN] -- Index of the first RHS symbol of rule number YYN in YYRHS. */ static const unsigned char yyprhs[] = { 0, 0, 3, 5, 8, 9, 11, 13, 15, 18, 21, 24, 28, 30, 32, 36, 37, 40, 43, 46, 49, 51, 54, 56, 58, 60, 63, 66, 69, 72, 75, 77, 79, 81, 85, 87, 89, 91, 93, 95, 99, 103, 106, 110, 113, 117, 120, 124, 127, 129, 133, 136, 140, 143, 145, 149, 151, 153, 157, 161, 165, 168, 172, 175, 179, 182, 184, 188, 190, 194, 196, 200, 204, 207, 211, 215, 218, 220, 224, 226 }; /* YYRHS -- A `-1'-separated list of the rules' RHS. 
*/ static const yysigned_char yyrhs[] = { 24, 0, -1, 25, -1, 11, 27, -1, -1, 33, -1, 26, -1, 34, -1, 5, 26, -1, 6, 26, -1, 3, 26, -1, 29, 26, 32, -1, 25, -1, 28, -1, 29, 28, 30, -1, -1, 7, 28, -1, 5, 28, -1, 6, 28, -1, 3, 28, -1, 12, -1, 29, 13, -1, 14, -1, 13, -1, 14, -1, 31, 32, -1, 5, 33, -1, 6, 33, -1, 7, 33, -1, 3, 33, -1, 4, -1, 8, -1, 9, -1, 29, 33, 32, -1, 10, -1, 35, -1, 39, -1, 42, -1, 49, -1, 29, 37, 30, -1, 29, 38, 30, -1, 15, 27, -1, 5, 31, 38, -1, 5, 37, -1, 6, 31, 38, -1, 6, 37, -1, 3, 31, 38, -1, 3, 37, -1, 36, -1, 38, 31, 36, -1, 38, 31, -1, 17, 40, 18, -1, 17, 18, -1, 41, -1, 40, 21, 41, -1, 25, -1, 48, -1, 29, 43, 30, -1, 29, 47, 30, -1, 5, 31, 47, -1, 5, 43, -1, 6, 31, 47, -1, 6, 43, -1, 3, 31, 47, -1, 3, 43, -1, 33, -1, 22, 25, 31, -1, 27, -1, 44, 16, 45, -1, 46, -1, 47, 31, 36, -1, 47, 31, 46, -1, 47, 31, -1, 25, 16, 27, -1, 19, 50, 20, -1, 19, 20, -1, 51, -1, 50, 21, 51, -1, 25, -1, 48, -1 }; /* YYRLINE[YYN] -- source line where rule number YYN was defined. */ static const unsigned short int yyrline[] = { 0, 56, 56, 60, 65, 70, 71, 74, 75, 80, 85, 94, 100, 101, 104, 109, 113, 121, 126, 131, 145, 146, 149, 152, 155, 156, 164, 169, 174, 182, 186, 194, 207, 208, 218, 219, 220, 221, 222, 228, 232, 238, 244, 249, 254, 259, 264, 268, 274, 278, 283, 292, 296, 302, 306, 313, 314, 320, 325, 332, 337, 342, 347, 352, 356, 362, 363, 369, 379, 396, 397, 409, 417, 426, 434, 438, 444, 445, 454, 461 }; #endif #if YYDEBUG || YYERROR_VERBOSE /* YYTNME[SYMBOL-NUM] -- String name of the symbol SYMBOL-NUM. First, the terminals, then, starting at YYNTOKENS, nonterminals. */ static const char *const yytname[] = { "$end", "error", "$undefined", "YAML_ANCHOR", "YAML_ALIAS", "YAML_TRANSFER", "YAML_TAGURI", "YAML_ITRANSFER", "YAML_WORD", "YAML_PLAIN", "YAML_BLOCK", "YAML_DOCSEP", "YAML_IOPEN", "YAML_INDENT", "YAML_IEND", "'-'", "':'", "'['", "']'", "'{'", "'}'", "','", "'?'", "$accept", "doc", "atom", "ind_rep", "atom_or_empty", "empty", "indent_open", "indent_end", "indent_sep", "indent_flex_end", "word_rep", "struct_rep", "implicit_seq", "basic_seq", "top_imp_seq", "in_implicit_seq", "inline_seq", "in_inline_seq", "inline_seq_atom", "implicit_map", "top_imp_map", "complex_key", "complex_value", "complex_mapping", "in_implicit_map", "basic_mapping", "inline_map", "in_inline_map", "inline_map_atom", 0 }; #endif # ifdef YYPRINT /* YYTOKNUM[YYLEX-NUM] -- Internal token number corresponding to token YYLEX-NUM. */ static const unsigned short int yytoknum[] = { 0, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 45, 58, 91, 93, 123, 125, 44, 63 }; # endif /* YYR1[YYN] -- Symbol number of symbol that rule YYN derives. */ static const unsigned char yyr1[] = { 0, 23, 24, 24, 24, 25, 25, 26, 26, 26, 26, 26, 27, 27, 28, 28, 28, 28, 28, 28, 29, 29, 30, 31, 32, 32, 33, 33, 33, 33, 33, 33, 33, 33, 34, 34, 34, 34, 34, 35, 35, 36, 37, 37, 37, 37, 37, 37, 38, 38, 38, 39, 39, 40, 40, 41, 41, 42, 42, 43, 43, 43, 43, 43, 43, 44, 44, 45, 46, 47, 47, 47, 47, 48, 49, 49, 50, 50, 51, 51 }; /* YYR2[YYN] -- Number of symbols composing right hand side of rule YYN. */ static const unsigned char yyr2[] = { 0, 2, 1, 2, 0, 1, 1, 1, 2, 2, 2, 3, 1, 1, 3, 0, 2, 2, 2, 2, 1, 2, 1, 1, 1, 2, 2, 2, 2, 2, 1, 1, 1, 3, 1, 1, 1, 1, 1, 3, 3, 2, 3, 2, 3, 2, 3, 2, 1, 3, 2, 3, 2, 1, 3, 1, 1, 3, 3, 3, 2, 3, 2, 3, 2, 1, 3, 1, 3, 1, 3, 3, 2, 3, 3, 2, 1, 3, 1, 1 }; /* YYDEFACT[STATE-NAME] -- Default rule to reduce with in state STATE-NUM when YYTABLE doesn't specify something else to do. 
Zero means the default is an error. */ static const unsigned char yydefact[] = { 4, 0, 30, 0, 0, 0, 31, 32, 34, 15, 20, 0, 0, 0, 2, 6, 0, 5, 7, 35, 36, 37, 38, 10, 29, 8, 26, 9, 27, 0, 0, 0, 0, 28, 15, 15, 15, 15, 12, 3, 13, 15, 52, 55, 0, 53, 56, 75, 78, 79, 0, 76, 1, 0, 0, 0, 21, 15, 0, 0, 65, 48, 0, 0, 0, 0, 69, 0, 0, 19, 17, 18, 15, 15, 15, 16, 15, 15, 15, 15, 0, 15, 51, 0, 74, 0, 23, 0, 47, 64, 0, 43, 60, 0, 45, 62, 41, 0, 24, 0, 11, 33, 22, 39, 40, 50, 57, 15, 58, 72, 14, 73, 54, 77, 65, 46, 63, 42, 59, 44, 61, 66, 25, 49, 67, 68, 70, 71 }; /* YYDEFGOTO[NTERM-NUM]. */ static const yysigned_char yydefgoto[] = { -1, 13, 38, 15, 39, 40, 16, 103, 99, 101, 17, 18, 19, 61, 62, 63, 20, 44, 45, 21, 64, 65, 125, 66, 67, 46, 22, 50, 51 }; /* YYPACT[STATE-NUM] -- Index in YYTABLE of the portion describing STATE-NUM. */ #define YYPACT_NINF -97 static const short int yypact[] = { 250, 318, -97, 318, 318, 374, -97, -97, -97, 335, -97, 267, 232, 7, -97, -97, 192, -97, -97, -97, -97, -97, -97, -97, -97, -97, -97, -97, -97, 374, 374, 374, 352, -97, 335, 335, 335, 384, -97, -97, -97, 212, -97, 10, 0, -97, -97, -97, 10, -97, -4, -97, -97, 284, 284, 284, -97, 335, 318, 30, 30, -97, -2, 36, -2, 16, -97, 36, 30, -97, -97, -97, 384, 384, 384, -97, 363, 301, 301, 301, -2, 335, -97, 318, -97, 318, -97, 158, -97, -97, 158, -97, -97, 158, -97, -97, -97, 24, -97, 30, -97, -97, -97, -97, -97, 26, -97, 335, -97, 158, -97, -97, -97, -97, -97, 24, 24, 24, 24, 24, 24, -97, -97, -97, -97, -97, -97, -97 }; /* YYPGOTO[NTERM-NUM]. */ static const yysigned_char yypgoto[] = { -97, -97, 8, 81, -56, 109, 33, -53, 74, -54, -1, -97, -97, -96, -31, -32, -97, -97, -44, -97, 77, -97, -97, -52, 9, -6, -97, -97, -29 }; /* YYTABLE[YYPACT[STATE-NUM]]. What to do in state STATE-NUM. If positive, shift that token. If negative, reduce the rule which number is the opposite. If zero, do what YYDEFACT says. If YYTABLE_NINF, syntax error. 
*/ #define YYTABLE_NINF -1 static const unsigned char yytable[] = { 24, 96, 26, 28, 33, 100, 49, 52, 14, 123, 104, 106, 102, 126, 108, 60, 84, 85, 82, 43, 48, 83, 88, 91, 94, 111, 81, 110, 24, 26, 28, 68, 107, 24, 26, 28, 33, 86, 32, 112, 60, 57, 41, 86, 98, 122, 88, 91, 94, 86, 102, 124, 24, 26, 28, 115, 113, 127, 117, 0, 0, 119, 32, 32, 32, 32, 97, 41, 41, 41, 76, 24, 26, 28, 41, 68, 24, 26, 28, 49, 0, 0, 23, 0, 25, 27, 114, 0, 0, 114, 41, 43, 114, 48, 0, 0, 116, 59, 0, 118, 0, 0, 120, 0, 0, 76, 76, 76, 114, 76, 41, 41, 41, 0, 41, 23, 25, 27, 0, 0, 32, 0, 59, 32, 0, 0, 32, 87, 90, 93, 89, 92, 95, 0, 23, 25, 27, 105, 0, 0, 41, 109, 32, 69, 70, 71, 75, 0, 0, 0, 80, 87, 90, 93, 89, 92, 95, 0, 23, 25, 27, 29, 2, 30, 31, 5, 6, 7, 0, 0, 10, 121, 0, 57, 0, 0, 0, 0, 0, 0, 58, 69, 70, 71, 0, 80, 69, 70, 71, 105, 109, 105, 109, 105, 109, 53, 2, 54, 55, 5, 6, 7, 8, 0, 10, 56, 0, 57, 0, 11, 0, 12, 0, 0, 58, 77, 2, 78, 79, 37, 6, 7, 8, 0, 10, 56, 0, 57, 0, 11, 0, 12, 0, 0, 58, 1, 2, 3, 4, 5, 6, 7, 8, 0, 10, 0, 0, 0, 0, 11, 0, 12, 47, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 0, 0, 0, 0, 11, 0, 12, 1, 2, 3, 4, 5, 6, 7, 8, 0, 10, 0, 0, 0, 0, 11, 42, 12, 53, 2, 54, 55, 5, 6, 7, 8, 0, 10, 86, 0, 0, 0, 11, 0, 12, 77, 2, 78, 79, 37, 6, 7, 8, 0, 10, 86, 0, 0, 0, 11, 0, 12, 1, 2, 3, 4, 5, 6, 7, 8, 0, 10, 0, 0, 0, 0, 11, 0, 12, 34, 2, 35, 36, 37, 6, 7, 8, 0, 10, 0, 0, 0, 0, 11, 0, 12, 29, 2, 30, 31, 5, 6, 7, 0, 0, 10, 56, 72, 2, 73, 74, 37, 6, 7, 0, 0, 10, 56, 29, 2, 30, 31, 5, 6, 7, 0, 0, 10, 72, 2, 73, 74, 37, 6, 7, 0, 0, 10 }; static const yysigned_char yycheck[] = { 1, 57, 3, 4, 5, 59, 12, 0, 0, 105, 63, 64, 14, 109, 67, 16, 20, 21, 18, 11, 12, 21, 53, 54, 55, 81, 16, 80, 29, 30, 31, 32, 16, 34, 35, 36, 37, 13, 5, 83, 41, 15, 9, 13, 14, 99, 77, 78, 79, 13, 14, 107, 53, 54, 55, 87, 85, 109, 90, -1, -1, 93, 29, 30, 31, 32, 58, 34, 35, 36, 37, 72, 73, 74, 41, 76, 77, 78, 79, 85, -1, -1, 1, -1, 3, 4, 87, -1, -1, 90, 57, 83, 93, 85, -1, -1, 87, 16, -1, 90, -1, -1, 93, -1, -1, 72, 73, 74, 109, 76, 77, 78, 79, -1, 81, 34, 35, 36, -1, -1, 87, -1, 41, 90, -1, -1, 93, 53, 54, 55, 53, 54, 55, -1, 53, 54, 55, 63, -1, -1, 107, 67, 109, 34, 35, 36, 37, -1, -1, -1, 41, 77, 78, 79, 77, 78, 79, -1, 77, 78, 79, 3, 4, 5, 6, 7, 8, 9, -1, -1, 12, 97, -1, 15, -1, -1, -1, -1, -1, -1, 22, 72, 73, 74, -1, 76, 77, 78, 79, 115, 116, 117, 118, 119, 120, 3, 4, 5, 6, 7, 8, 9, 10, -1, 12, 13, -1, 15, -1, 17, -1, 19, -1, -1, 22, 3, 4, 5, 6, 7, 8, 9, 10, -1, 12, 13, -1, 15, -1, 17, -1, 19, -1, -1, 22, 3, 4, 5, 6, 7, 8, 9, 10, -1, 12, -1, -1, -1, -1, 17, -1, 19, 20, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, -1, -1, -1, -1, 17, -1, 19, 3, 4, 5, 6, 7, 8, 9, 10, -1, 12, -1, -1, -1, -1, 17, 18, 19, 3, 4, 5, 6, 7, 8, 9, 10, -1, 12, 13, -1, -1, -1, 17, -1, 19, 3, 4, 5, 6, 7, 8, 9, 10, -1, 12, 13, -1, -1, -1, 17, -1, 19, 3, 4, 5, 6, 7, 8, 9, 10, -1, 12, -1, -1, -1, -1, 17, -1, 19, 3, 4, 5, 6, 7, 8, 9, 10, -1, 12, -1, -1, -1, -1, 17, -1, 19, 3, 4, 5, 6, 7, 8, 9, -1, -1, 12, 13, 3, 4, 5, 6, 7, 8, 9, -1, -1, 12, 13, 3, 4, 5, 6, 7, 8, 9, -1, -1, 12, 3, 4, 5, 6, 7, 8, 9, -1, -1, 12 }; /* YYSTOS[STATE-NUM] -- The (internal number of the) accessing symbol of state STATE-NUM. 
*/ static const unsigned char yystos[] = { 0, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 17, 19, 24, 25, 26, 29, 33, 34, 35, 39, 42, 49, 26, 33, 26, 33, 26, 33, 3, 5, 6, 29, 33, 3, 5, 6, 7, 25, 27, 28, 29, 18, 25, 40, 41, 48, 20, 25, 48, 50, 51, 0, 3, 5, 6, 13, 15, 22, 26, 33, 36, 37, 38, 43, 44, 46, 47, 33, 28, 28, 28, 3, 5, 6, 28, 29, 3, 5, 6, 28, 16, 18, 21, 20, 21, 13, 31, 37, 43, 31, 37, 43, 31, 37, 43, 27, 25, 14, 31, 32, 32, 14, 30, 30, 31, 30, 16, 30, 31, 30, 27, 41, 51, 33, 38, 47, 38, 47, 38, 47, 31, 32, 36, 27, 45, 36, 46 }; #if ! defined (YYSIZE_T) && defined (__SIZE_TYPE__) # define YYSIZE_T __SIZE_TYPE__ #endif #if ! defined (YYSIZE_T) && defined (size_t) # define YYSIZE_T size_t #endif #if ! defined (YYSIZE_T) # if defined (__STDC__) || defined (__cplusplus) # include /* INFRINGES ON USER NAME SPACE */ # define YYSIZE_T size_t # endif #endif #if ! defined (YYSIZE_T) # define YYSIZE_T unsigned int #endif #define yyerrok (yyerrstatus = 0) #define yyclearin (yychar = YYEMPTY) #define YYEMPTY (-2) #define YYEOF 0 #define YYACCEPT goto yyacceptlab #define YYABORT goto yyabortlab #define YYERROR goto yyerrorlab /* Like YYERROR except do call yyerror. This remains here temporarily to ease the transition to the new meaning of YYERROR, for GCC. Once GCC version 2 has supplanted version 1, this can go. */ #define YYFAIL goto yyerrlab #define YYRECOVERING() (!!yyerrstatus) #define YYBACKUP(Token, Value) \ do \ if (yychar == YYEMPTY && yylen == 1) \ { \ yychar = (Token); \ yylval = (Value); \ yytoken = YYTRANSLATE (yychar); \ YYPOPSTACK; \ goto yybackup; \ } \ else \ { \ yyerror ("syntax error: cannot back up");\ YYERROR; \ } \ while (0) #define YYTERROR 1 #define YYERRCODE 256 /* YYLLOC_DEFAULT -- Compute the default location (before the actions are run). */ #ifndef YYLLOC_DEFAULT # define YYLLOC_DEFAULT(Current, Rhs, N) \ ((Current).first_line = (Rhs)[1].first_line, \ (Current).first_column = (Rhs)[1].first_column, \ (Current).last_line = (Rhs)[N].last_line, \ (Current).last_column = (Rhs)[N].last_column) #endif /* YYLEX -- calling `yylex' with the right arguments. */ #ifdef YYLEX_PARAM # define YYLEX yylex (&yylval, YYLEX_PARAM) #else # define YYLEX yylex (&yylval) #endif /* Enable debugging if requested. */ #if YYDEBUG # ifndef YYFPRINTF # include /* INFRINGES ON USER NAME SPACE */ # define YYFPRINTF fprintf # endif # define YYDPRINTF(Args) \ do { \ if (yydebug) \ YYFPRINTF Args; \ } while (0) # define YYDSYMPRINT(Args) \ do { \ if (yydebug) \ yysymprint Args; \ } while (0) # define YYDSYMPRINTF(Title, Token, Value, Location) \ do { \ if (yydebug) \ { \ YYFPRINTF (stderr, "%s ", Title); \ yysymprint (stderr, \ Token, Value); \ YYFPRINTF (stderr, "\n"); \ } \ } while (0) /*------------------------------------------------------------------. | yy_stack_print -- Print the state stack from its BOTTOM up to its | | TOP (included). | `------------------------------------------------------------------*/ #if defined (__STDC__) || defined (__cplusplus) static void yy_stack_print (short int *bottom, short int *top) #else static void yy_stack_print (bottom, top) short int *bottom; short int *top; #endif { YYFPRINTF (stderr, "Stack now"); for (/* Nothing. */; bottom <= top; ++bottom) YYFPRINTF (stderr, " %d", *bottom); YYFPRINTF (stderr, "\n"); } # define YY_STACK_PRINT(Bottom, Top) \ do { \ if (yydebug) \ yy_stack_print ((Bottom), (Top)); \ } while (0) /*------------------------------------------------. | Report that the YYRULE is going to be reduced. 
| `------------------------------------------------*/ #if defined (__STDC__) || defined (__cplusplus) static void yy_reduce_print (int yyrule) #else static void yy_reduce_print (yyrule) int yyrule; #endif { int yyi; unsigned int yylno = yyrline[yyrule]; YYFPRINTF (stderr, "Reducing stack by rule %d (line %u), ", yyrule - 1, yylno); /* Print the symbols being reduced, and their result. */ for (yyi = yyprhs[yyrule]; 0 <= yyrhs[yyi]; yyi++) YYFPRINTF (stderr, "%s ", yytname [yyrhs[yyi]]); YYFPRINTF (stderr, "-> %s\n", yytname [yyr1[yyrule]]); } # define YY_REDUCE_PRINT(Rule) \ do { \ if (yydebug) \ yy_reduce_print (Rule); \ } while (0) /* Nonzero means print parse trace. It is left uninitialized so that multiple parsers can coexist. */ int yydebug; #else /* !YYDEBUG */ # define YYDPRINTF(Args) # define YYDSYMPRINT(Args) # define YYDSYMPRINTF(Title, Token, Value, Location) # define YY_STACK_PRINT(Bottom, Top) # define YY_REDUCE_PRINT(Rule) #endif /* !YYDEBUG */ /* YYINITDEPTH -- initial size of the parser's stacks. */ #ifndef YYINITDEPTH # define YYINITDEPTH 200 #endif /* YYMAXDEPTH -- maximum size the stacks can grow to (effective only if the built-in stack extension method is used). Do not make this value too large; the results are undefined if SIZE_MAX < YYSTACK_BYTES (YYMAXDEPTH) evaluated with infinite-precision integer arithmetic. */ #if defined (YYMAXDEPTH) && YYMAXDEPTH == 0 # undef YYMAXDEPTH #endif #ifndef YYMAXDEPTH # define YYMAXDEPTH 10000 #endif #if YYERROR_VERBOSE # ifndef yystrlen # if defined (__GLIBC__) && defined (_STRING_H) # define yystrlen strlen # else /* Return the length of YYSTR. */ static YYSIZE_T # if defined (__STDC__) || defined (__cplusplus) yystrlen (const char *yystr) # else yystrlen (yystr) const char *yystr; # endif { register const char *yys = yystr; while (*yys++ != '\0') continue; return yys - yystr - 1; } # endif # endif # ifndef yystpcpy # if defined (__GLIBC__) && defined (_STRING_H) && defined (_GNU_SOURCE) # define yystpcpy stpcpy # else /* Copy YYSRC to YYDEST, returning the address of the terminating '\0' in YYDEST. */ static char * # if defined (__STDC__) || defined (__cplusplus) yystpcpy (char *yydest, const char *yysrc) # else yystpcpy (yydest, yysrc) char *yydest; const char *yysrc; # endif { register char *yyd = yydest; register const char *yys = yysrc; while ((*yyd++ = *yys++) != '\0') continue; return yyd - 1; } # endif # endif #endif /* !YYERROR_VERBOSE */ #if YYDEBUG /*--------------------------------. | Print this symbol on YYOUTPUT. | `--------------------------------*/ #if defined (__STDC__) || defined (__cplusplus) static void yysymprint (FILE *yyoutput, int yytype, YYSTYPE *yyvaluep) #else static void yysymprint (yyoutput, yytype, yyvaluep) FILE *yyoutput; int yytype; YYSTYPE *yyvaluep; #endif { /* Pacify ``unused variable'' warnings. */ (void) yyvaluep; if (yytype < YYNTOKENS) { YYFPRINTF (yyoutput, "token %s (", yytname[yytype]); # ifdef YYPRINT YYPRINT (yyoutput, yytoknum[yytype], *yyvaluep); # endif } else YYFPRINTF (yyoutput, "nterm %s (", yytname[yytype]); switch (yytype) { default: break; } YYFPRINTF (yyoutput, ")"); } #endif /* ! YYDEBUG */ /*-----------------------------------------------. | Release the memory associated to this symbol. | `-----------------------------------------------*/ #if defined (__STDC__) || defined (__cplusplus) static void yydestruct (int yytype, YYSTYPE *yyvaluep) #else static void yydestruct (yytype, yyvaluep) int yytype; YYSTYPE *yyvaluep; #endif { /* Pacify ``unused variable'' warnings. 
*/ (void) yyvaluep; switch (yytype) { default: break; } } /* Prevent warnings from -Wmissing-prototypes. */ #ifdef YYPARSE_PARAM # if defined (__STDC__) || defined (__cplusplus) int yyparse (void *YYPARSE_PARAM); # else int yyparse (); # endif #else /* ! YYPARSE_PARAM */ #if defined (__STDC__) || defined (__cplusplus) int yyparse (void); #else int yyparse (); #endif #endif /* ! YYPARSE_PARAM */ /*----------. | yyparse. | `----------*/ #ifdef YYPARSE_PARAM # if defined (__STDC__) || defined (__cplusplus) int yyparse (void *YYPARSE_PARAM) # else int yyparse (YYPARSE_PARAM) void *YYPARSE_PARAM; # endif #else /* ! YYPARSE_PARAM */ #if defined (__STDC__) || defined (__cplusplus) int yyparse (void) #else int yyparse () #endif #endif { /* The lookahead symbol. */ int yychar; /* The semantic value of the lookahead symbol. */ YYSTYPE yylval; /* Number of syntax errors so far. */ int yynerrs; register int yystate; register int yyn; int yyresult; /* Number of tokens to shift before error messages enabled. */ int yyerrstatus; /* Lookahead token as an internal (translated) token number. */ int yytoken = 0; /* Three stacks and their tools: `yyss': related to states, `yyvs': related to semantic values, `yyls': related to locations. Refer to the stacks thru separate pointers, to allow yyoverflow to reallocate them elsewhere. */ /* The state stack. */ short int yyssa[YYINITDEPTH]; short int *yyss = yyssa; register short int *yyssp; /* The semantic value stack. */ YYSTYPE yyvsa[YYINITDEPTH]; YYSTYPE *yyvs = yyvsa; register YYSTYPE *yyvsp; #define YYPOPSTACK (yyvsp--, yyssp--) YYSIZE_T yystacksize = YYINITDEPTH; /* The variables used to return semantic value and location from the action routines. */ YYSTYPE yyval; /* When reducing, the number of symbols on the RHS of the reduced rule. */ int yylen; YYDPRINTF ((stderr, "Starting parse\n")); yystate = 0; yyerrstatus = 0; yynerrs = 0; yychar = YYEMPTY; /* Cause a token to be read. */ /* Initialize stack pointers. Waste one element of value and location stack so that they stay on the same level as the state stack. The wasted elements are never initialized. */ yyssp = yyss; yyvsp = yyvs; goto yysetstate; /*------------------------------------------------------------. | yynewstate -- Push a new state, which is found in yystate. | `------------------------------------------------------------*/ yynewstate: /* In all cases, when you get here, the value and location stacks have just been pushed. so pushing a state here evens the stacks. */ yyssp++; yysetstate: *yyssp = yystate; if (yyss + yystacksize - 1 <= yyssp) { /* Get the current used size of the three stacks, in elements. */ YYSIZE_T yysize = yyssp - yyss + 1; #ifdef yyoverflow { /* Give user a chance to reallocate the stack. Use copies of these so that the &'s don't force the real ones into memory. */ YYSTYPE *yyvs1 = yyvs; short int *yyss1 = yyss; /* Each stack pointer address is followed by the size of the data in use in that stack, in bytes. This used to be a conditional around just the two extra args, but that might be undefined if yyoverflow is a macro. */ yyoverflow ("parser stack overflow", &yyss1, yysize * sizeof (*yyssp), &yyvs1, yysize * sizeof (*yyvsp), &yystacksize); yyss = yyss1; yyvs = yyvs1; } #else /* no yyoverflow */ # ifndef YYSTACK_RELOCATE goto yyoverflowlab; # else /* Extend the stack our own way. 
*/ if (YYMAXDEPTH <= yystacksize) goto yyoverflowlab; yystacksize *= 2; if (YYMAXDEPTH < yystacksize) yystacksize = YYMAXDEPTH; { short int *yyss1 = yyss; union yyalloc *yyptr = (union yyalloc *) YYSTACK_ALLOC (YYSTACK_BYTES (yystacksize)); if (! yyptr) goto yyoverflowlab; YYSTACK_RELOCATE (yyss); YYSTACK_RELOCATE (yyvs); # undef YYSTACK_RELOCATE if (yyss1 != yyssa) YYSTACK_FREE (yyss1); } # endif #endif /* no yyoverflow */ yyssp = yyss + yysize - 1; yyvsp = yyvs + yysize - 1; YYDPRINTF ((stderr, "Stack size increased to %lu\n", (unsigned long int) yystacksize)); if (yyss + yystacksize - 1 <= yyssp) YYABORT; } YYDPRINTF ((stderr, "Entering state %d\n", yystate)); goto yybackup; /*-----------. | yybackup. | `-----------*/ yybackup: /* Do appropriate processing given the current state. */ /* Read a lookahead token if we need one and don't already have one. */ /* yyresume: */ /* First try to decide what to do without reference to lookahead token. */ yyn = yypact[yystate]; if (yyn == YYPACT_NINF) goto yydefault; /* Not known => get a lookahead token if don't already have one. */ /* YYCHAR is either YYEMPTY or YYEOF or a valid lookahead symbol. */ if (yychar == YYEMPTY) { YYDPRINTF ((stderr, "Reading a token: ")); yychar = YYLEX; } if (yychar <= YYEOF) { yychar = yytoken = YYEOF; YYDPRINTF ((stderr, "Now at end of input.\n")); } else { yytoken = YYTRANSLATE (yychar); YYDSYMPRINTF ("Next token is", yytoken, &yylval, &yylloc); } /* If the proper action on seeing token YYTOKEN is to reduce or to detect an error, take that action. */ yyn += yytoken; if (yyn < 0 || YYLAST < yyn || yycheck[yyn] != yytoken) goto yydefault; yyn = yytable[yyn]; if (yyn <= 0) { if (yyn == 0 || yyn == YYTABLE_NINF) goto yyerrlab; yyn = -yyn; goto yyreduce; } if (yyn == YYFINAL) YYACCEPT; /* Shift the lookahead token. */ YYDPRINTF ((stderr, "Shifting token %s, ", yytname[yytoken])); /* Discard the token being shifted unless it is eof. */ if (yychar != YYEOF) yychar = YYEMPTY; *++yyvsp = yylval; /* Count tokens shifted since error; after three, turn off error status. */ if (yyerrstatus) yyerrstatus--; yystate = yyn; goto yynewstate; /*-----------------------------------------------------------. | yydefault -- do the default action for the current state. | `-----------------------------------------------------------*/ yydefault: yyn = yydefact[yystate]; if (yyn == 0) goto yyerrlab; goto yyreduce; /*-----------------------------. | yyreduce -- Do a reduction. | `-----------------------------*/ yyreduce: /* yyn is the number of a rule to reduce with. */ yylen = yyr2[yyn]; /* If YYLEN is nonzero, implement the default value of the action: `$$ = $1'. Otherwise, the following line sets YYVAL to garbage. This behavior is undocumented and Bison users should not rely upon it. Assigning to YYVAL unconditionally makes the parser a bit smaller, and it avoids a GCC warning that YYVAL may be used uninitialized. 
*/ yyval = yyvsp[1-yylen]; YY_REDUCE_PRINT (yyn); switch (yyn) { case 2: #line 57 "gram.y" { ((SyckParser *)parser)->root = syck_hdlr_add_node( (SyckParser *)parser, yyvsp[0].nodeData ); } break; case 3: #line 61 "gram.y" { ((SyckParser *)parser)->root = syck_hdlr_add_node( (SyckParser *)parser, yyvsp[0].nodeData ); } break; case 4: #line 65 "gram.y" { ((SyckParser *)parser)->eof = 1; } break; case 8: #line 76 "gram.y" { syck_add_transfer( yyvsp[-1].name, yyvsp[0].nodeData, ((SyckParser *)parser)->taguri_expansion ); yyval.nodeData = yyvsp[0].nodeData; } break; case 9: #line 81 "gram.y" { syck_add_transfer( yyvsp[-1].name, yyvsp[0].nodeData, 0 ); yyval.nodeData = yyvsp[0].nodeData; } break; case 10: #line 86 "gram.y" { /* * _Anchors_: The language binding must keep a separate symbol table * for anchors. The actual ID in the symbol table is returned to the * higher nodes, though. */ yyval.nodeData = syck_hdlr_add_anchor( (SyckParser *)parser, yyvsp[-1].name, yyvsp[0].nodeData ); } break; case 11: #line 95 "gram.y" { yyval.nodeData = yyvsp[-1].nodeData; } break; case 14: #line 105 "gram.y" { yyval.nodeData = yyvsp[-1].nodeData; } break; case 15: #line 109 "gram.y" { NULL_NODE( parser, n ); yyval.nodeData = n; } break; case 16: #line 114 "gram.y" { if ( ((SyckParser *)parser)->implicit_typing == 1 ) { try_tag_implicit( yyvsp[0].nodeData, ((SyckParser *)parser)->taguri_expansion ); } yyval.nodeData = yyvsp[0].nodeData; } break; case 17: #line 122 "gram.y" { syck_add_transfer( yyvsp[-1].name, yyvsp[0].nodeData, ((SyckParser *)parser)->taguri_expansion ); yyval.nodeData = yyvsp[0].nodeData; } break; case 18: #line 127 "gram.y" { syck_add_transfer( yyvsp[-1].name, yyvsp[0].nodeData, 0 ); yyval.nodeData = yyvsp[0].nodeData; } break; case 19: #line 132 "gram.y" { /* * _Anchors_: The language binding must keep a separate symbol table * for anchors. The actual ID in the symbol table is returned to the * higher nodes, though. */ yyval.nodeData = syck_hdlr_add_anchor( (SyckParser *)parser, yyvsp[-1].name, yyvsp[0].nodeData ); } break; case 26: #line 165 "gram.y" { syck_add_transfer( yyvsp[-1].name, yyvsp[0].nodeData, ((SyckParser *)parser)->taguri_expansion ); yyval.nodeData = yyvsp[0].nodeData; } break; case 27: #line 170 "gram.y" { syck_add_transfer( yyvsp[-1].name, yyvsp[0].nodeData, 0 ); yyval.nodeData = yyvsp[0].nodeData; } break; case 28: #line 175 "gram.y" { if ( ((SyckParser *)parser)->implicit_typing == 1 ) { try_tag_implicit( yyvsp[0].nodeData, ((SyckParser *)parser)->taguri_expansion ); } yyval.nodeData = yyvsp[0].nodeData; } break; case 29: #line 183 "gram.y" { yyval.nodeData = syck_hdlr_add_anchor( (SyckParser *)parser, yyvsp[-1].name, yyvsp[0].nodeData ); } break; case 30: #line 187 "gram.y" { /* * _Aliases_: The anchor symbol table is scanned for the anchor name. * The anchor's ID in the language's symbol table is returned. 
*/ yyval.nodeData = syck_hdlr_get_anchor( (SyckParser *)parser, yyvsp[0].name ); } break; case 31: #line 195 "gram.y" { SyckNode *n = yyvsp[0].nodeData; if ( ((SyckParser *)parser)->taguri_expansion == 1 ) { n->type_id = syck_taguri( YAML_DOMAIN, "str", 3 ); } else { n->type_id = syck_strndup( "str", 3 ); } yyval.nodeData = n; } break; case 33: #line 209 "gram.y" { yyval.nodeData = yyvsp[-1].nodeData; } break; case 39: #line 229 "gram.y" { yyval.nodeData = yyvsp[-1].nodeData; } break; case 40: #line 233 "gram.y" { yyval.nodeData = yyvsp[-1].nodeData; } break; case 41: #line 239 "gram.y" { yyval.nodeId = syck_hdlr_add_node( (SyckParser *)parser, yyvsp[0].nodeData ); } break; case 42: #line 245 "gram.y" { syck_add_transfer( yyvsp[-2].name, yyvsp[0].nodeData, ((SyckParser *)parser)->taguri_expansion ); yyval.nodeData = yyvsp[0].nodeData; } break; case 43: #line 250 "gram.y" { syck_add_transfer( yyvsp[-1].name, yyvsp[0].nodeData, ((SyckParser *)parser)->taguri_expansion ); yyval.nodeData = yyvsp[0].nodeData; } break; case 44: #line 255 "gram.y" { syck_add_transfer( yyvsp[-2].name, yyvsp[0].nodeData, 0 ); yyval.nodeData = yyvsp[0].nodeData; } break; case 45: #line 260 "gram.y" { syck_add_transfer( yyvsp[-1].name, yyvsp[0].nodeData, 0 ); yyval.nodeData = yyvsp[0].nodeData; } break; case 46: #line 265 "gram.y" { yyval.nodeData = syck_hdlr_add_anchor( (SyckParser *)parser, yyvsp[-2].name, yyvsp[0].nodeData ); } break; case 47: #line 269 "gram.y" { yyval.nodeData = syck_hdlr_add_anchor( (SyckParser *)parser, yyvsp[-1].name, yyvsp[0].nodeData ); } break; case 48: #line 275 "gram.y" { yyval.nodeData = syck_new_seq( yyvsp[0].nodeId ); } break; case 49: #line 279 "gram.y" { syck_seq_add( yyvsp[-2].nodeData, yyvsp[0].nodeId ); yyval.nodeData = yyvsp[-2].nodeData; } break; case 50: #line 284 "gram.y" { yyval.nodeData = yyvsp[-1].nodeData; } break; case 51: #line 293 "gram.y" { yyval.nodeData = yyvsp[-1].nodeData; } break; case 52: #line 297 "gram.y" { yyval.nodeData = syck_alloc_seq(); } break; case 53: #line 303 "gram.y" { yyval.nodeData = syck_new_seq( syck_hdlr_add_node( (SyckParser *)parser, yyvsp[0].nodeData ) ); } break; case 54: #line 307 "gram.y" { syck_seq_add( yyvsp[-2].nodeData, syck_hdlr_add_node( (SyckParser *)parser, yyvsp[0].nodeData ) ); yyval.nodeData = yyvsp[-2].nodeData; } break; case 57: #line 321 "gram.y" { apply_seq_in_map( (SyckParser *)parser, yyvsp[-1].nodeData ); yyval.nodeData = yyvsp[-1].nodeData; } break; case 58: #line 326 "gram.y" { apply_seq_in_map( (SyckParser *)parser, yyvsp[-1].nodeData ); yyval.nodeData = yyvsp[-1].nodeData; } break; case 59: #line 333 "gram.y" { syck_add_transfer( yyvsp[-2].name, yyvsp[0].nodeData, ((SyckParser *)parser)->taguri_expansion ); yyval.nodeData = yyvsp[0].nodeData; } break; case 60: #line 338 "gram.y" { syck_add_transfer( yyvsp[-1].name, yyvsp[0].nodeData, ((SyckParser *)parser)->taguri_expansion ); yyval.nodeData = yyvsp[0].nodeData; } break; case 61: #line 343 "gram.y" { syck_add_transfer( yyvsp[-2].name, yyvsp[0].nodeData, 0 ); yyval.nodeData = yyvsp[0].nodeData; } break; case 62: #line 348 "gram.y" { syck_add_transfer( yyvsp[-1].name, yyvsp[0].nodeData, 0 ); yyval.nodeData = yyvsp[0].nodeData; } break; case 63: #line 353 "gram.y" { yyval.nodeData = syck_hdlr_add_anchor( (SyckParser *)parser, yyvsp[-2].name, yyvsp[0].nodeData ); } break; case 64: #line 357 "gram.y" { yyval.nodeData = syck_hdlr_add_anchor( (SyckParser *)parser, yyvsp[-1].name, yyvsp[0].nodeData ); } break; case 66: #line 364 "gram.y" { yyval.nodeData = 
yyvsp[-1].nodeData; } break; case 68: #line 380 "gram.y" { yyval.nodeData = syck_new_map( syck_hdlr_add_node( (SyckParser *)parser, yyvsp[-2].nodeData ), syck_hdlr_add_node( (SyckParser *)parser, yyvsp[0].nodeData ) ); } break; case 70: #line 398 "gram.y" { if ( yyvsp[-2].nodeData->shortcut == NULL ) { yyvsp[-2].nodeData->shortcut = syck_new_seq( yyvsp[0].nodeId ); } else { syck_seq_add( yyvsp[-2].nodeData->shortcut, yyvsp[0].nodeId ); } yyval.nodeData = yyvsp[-2].nodeData; } break; case 71: #line 410 "gram.y" { apply_seq_in_map( (SyckParser *)parser, yyvsp[-2].nodeData ); syck_map_update( yyvsp[-2].nodeData, yyvsp[0].nodeData ); syck_free_node( yyvsp[0].nodeData ); yyvsp[0].nodeData = NULL; yyval.nodeData = yyvsp[-2].nodeData; } break; case 72: #line 418 "gram.y" { yyval.nodeData = yyvsp[-1].nodeData; } break; case 73: #line 427 "gram.y" { yyval.nodeData = syck_new_map( syck_hdlr_add_node( (SyckParser *)parser, yyvsp[-2].nodeData ), syck_hdlr_add_node( (SyckParser *)parser, yyvsp[0].nodeData ) ); } break; case 74: #line 435 "gram.y" { yyval.nodeData = yyvsp[-1].nodeData; } break; case 75: #line 439 "gram.y" { yyval.nodeData = syck_alloc_map(); } break; case 77: #line 446 "gram.y" { syck_map_update( yyvsp[-2].nodeData, yyvsp[0].nodeData ); syck_free_node( yyvsp[0].nodeData ); yyvsp[0].nodeData = NULL; yyval.nodeData = yyvsp[-2].nodeData; } break; case 78: #line 455 "gram.y" { NULL_NODE( parser, n ); yyval.nodeData = syck_new_map( syck_hdlr_add_node( (SyckParser *)parser, yyvsp[0].nodeData ), syck_hdlr_add_node( (SyckParser *)parser, n ) ); } break; } /* Line 1010 of yacc.c. */ #line 1651 "gram.c" yyvsp -= yylen; yyssp -= yylen; YY_STACK_PRINT (yyss, yyssp); *++yyvsp = yyval; /* Now `shift' the result of the reduction. Determine what state that goes to, based on the state we popped back to and the rule number reduced by. */ yyn = yyr1[yyn]; yystate = yypgoto[yyn - YYNTOKENS] + *yyssp; if (0 <= yystate && yystate <= YYLAST && yycheck[yystate] == *yyssp) yystate = yytable[yystate]; else yystate = yydefgoto[yyn - YYNTOKENS]; goto yynewstate; /*------------------------------------. | yyerrlab -- here on detecting error | `------------------------------------*/ yyerrlab: /* If not already recovering from an error, report this error. */ if (!yyerrstatus) { ++yynerrs; #if YYERROR_VERBOSE yyn = yypact[yystate]; if (YYPACT_NINF < yyn && yyn < YYLAST) { YYSIZE_T yysize = 0; int yytype = YYTRANSLATE (yychar); const char* yyprefix; char *yymsg; int yyx; /* Start YYX at -YYN if negative to avoid negative indexes in YYCHECK. */ int yyxbegin = yyn < 0 ? -yyn : 0; /* Stay within bounds of both yycheck and yytname. */ int yychecklim = YYLAST - yyn; int yyxend = yychecklim < YYNTOKENS ? 
yychecklim : YYNTOKENS; int yycount = 0; yyprefix = ", expecting "; for (yyx = yyxbegin; yyx < yyxend; ++yyx) if (yycheck[yyx + yyn] == yyx && yyx != YYTERROR) { yysize += yystrlen (yyprefix) + yystrlen (yytname [yyx]); yycount += 1; if (yycount == 5) { yysize = 0; break; } } yysize += (sizeof ("syntax error, unexpected ") + yystrlen (yytname[yytype])); yymsg = (char *) YYSTACK_ALLOC (yysize); if (yymsg != 0) { char *yyp = yystpcpy (yymsg, "syntax error, unexpected "); yyp = yystpcpy (yyp, yytname[yytype]); if (yycount < 5) { yyprefix = ", expecting "; for (yyx = yyxbegin; yyx < yyxend; ++yyx) if (yycheck[yyx + yyn] == yyx && yyx != YYTERROR) { yyp = yystpcpy (yyp, yyprefix); yyp = yystpcpy (yyp, yytname[yyx]); yyprefix = " or "; } } yyerror (yymsg); YYSTACK_FREE (yymsg); } else yyerror ("syntax error; also virtual memory exhausted"); } else #endif /* YYERROR_VERBOSE */ yyerror ("syntax error"); } if (yyerrstatus == 3) { /* If just tried and failed to reuse lookahead token after an error, discard it. */ if (yychar <= YYEOF) { /* If at end of input, pop the error token, then the rest of the stack, then return failure. */ if (yychar == YYEOF) for (;;) { YYPOPSTACK; if (yyssp == yyss) YYABORT; YYDSYMPRINTF ("Error: popping", yystos[*yyssp], yyvsp, yylsp); yydestruct (yystos[*yyssp], yyvsp); } } else { YYDSYMPRINTF ("Error: discarding", yytoken, &yylval, &yylloc); yydestruct (yytoken, &yylval); yychar = YYEMPTY; } } /* Else will try to reuse lookahead token after shifting the error token. */ goto yyerrlab1; /*---------------------------------------------------. | yyerrorlab -- error raised explicitly by YYERROR. | `---------------------------------------------------*/ yyerrorlab: #ifdef __GNUC__ /* Pacify GCC when the user code never invokes YYERROR and the label yyerrorlab therefore never appears in user code. */ if (0) goto yyerrorlab; #endif yyvsp -= yylen; yyssp -= yylen; yystate = *yyssp; goto yyerrlab1; /*-------------------------------------------------------------. | yyerrlab1 -- common code for both syntax error and YYERROR. | `-------------------------------------------------------------*/ yyerrlab1: yyerrstatus = 3; /* Each real token shifted decrements this. */ for (;;) { yyn = yypact[yystate]; if (yyn != YYPACT_NINF) { yyn += YYTERROR; if (0 <= yyn && yyn <= YYLAST && yycheck[yyn] == YYTERROR) { yyn = yytable[yyn]; if (0 < yyn) break; } } /* Pop the current state because it cannot handle the error token. */ if (yyssp == yyss) YYABORT; YYDSYMPRINTF ("Error: popping", yystos[*yyssp], yyvsp, yylsp); yydestruct (yystos[yystate], yyvsp); YYPOPSTACK; yystate = *yyssp; YY_STACK_PRINT (yyss, yyssp); } if (yyn == YYFINAL) YYACCEPT; YYDPRINTF ((stderr, "Shifting error token, ")); *++yyvsp = yylval; yystate = yyn; goto yynewstate; /*-------------------------------------. | yyacceptlab -- YYACCEPT comes here. | `-------------------------------------*/ yyacceptlab: yyresult = 0; goto yyreturn; /*-----------------------------------. | yyabortlab -- YYABORT comes here. | `-----------------------------------*/ yyabortlab: yyresult = 1; goto yyreturn; #ifndef yyoverflow /*----------------------------------------------. | yyoverflowlab -- parser overflow comes here. | `----------------------------------------------*/ yyoverflowlab: yyerror ("parser stack overflow"); yyresult = 2; /* Fall through. 
*/ #endif yyreturn: #ifndef yyoverflow if (yyss != yyssa) YYSTACK_FREE (yyss); #endif return yyresult; } #line 464 "gram.y" void apply_seq_in_map( SyckParser *parser, SyckNode *n ) { long map_len; if ( n->shortcut == NULL ) { return; } map_len = syck_map_count( n ); syck_map_assign( n, map_value, map_len - 1, syck_hdlr_add_node( parser, n->shortcut ) ); n->shortcut = NULL; } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/ext/syck/node.c0000644000000000000000000001673511672453175024521 0ustar rootroot/* * node.c * * $Author: why $ * $Date: 2005/04/13 06:27:54 $ * * Copyright (C) 2003 why the lucky stiff */ #include "syck.h" /* * Node allocation functions */ SyckNode * syck_alloc_node( enum syck_kind_tag type ) { SyckNode *s; s = S_ALLOC( SyckNode ); s->kind = type; s->id = 0; s->type_id = NULL; s->anchor = NULL; s->shortcut = NULL; return s; } void syck_free_node( SyckNode *n ) { syck_free_members( n ); if ( n->type_id != NULL ) { S_FREE( n->type_id ); n->type_id = NULL; } if ( n->anchor != NULL ) { S_FREE( n->anchor ); n->anchor = NULL; } S_FREE( n ); } SyckNode * syck_alloc_map() { SyckNode *n; struct SyckMap *m; m = S_ALLOC( struct SyckMap ); m->style = map_none; m->idx = 0; m->capa = ALLOC_CT; m->keys = S_ALLOC_N( SYMID, m->capa ); m->values = S_ALLOC_N( SYMID, m->capa ); n = syck_alloc_node( syck_map_kind ); n->data.pairs = m; return n; } SyckNode * syck_alloc_seq() { SyckNode *n; struct SyckSeq *s; s = S_ALLOC( struct SyckSeq ); s->style = seq_none; s->idx = 0; s->capa = ALLOC_CT; s->items = S_ALLOC_N( SYMID, s->capa ); n = syck_alloc_node( syck_seq_kind ); n->data.list = s; return n; } SyckNode * syck_alloc_str() { SyckNode *n; struct SyckStr *s; s = S_ALLOC( struct SyckStr ); s->len = 0; s->ptr = NULL; s->style = scalar_none; n = syck_alloc_node( syck_str_kind ); n->data.str = s; return n; } SyckNode * syck_new_str( char *str, enum scalar_style style ) { return syck_new_str2( str, strlen( str ), style ); } SyckNode * syck_new_str2( char *str, long len, enum scalar_style style ) { SyckNode *n; n = syck_alloc_str(); n->data.str->ptr = S_ALLOC_N( char, len + 1 ); n->data.str->len = len; n->data.str->style = style; memcpy( n->data.str->ptr, str, len ); n->data.str->ptr[len] = '\0'; return n; } void syck_replace_str( SyckNode *n, char *str, enum scalar_style style ) { return syck_replace_str2( n, str, strlen( str ), style ); } void syck_replace_str2( SyckNode *n, char *str, long len, enum scalar_style style ) { if ( n->data.str != NULL ) { S_FREE( n->data.str->ptr ); n->data.str->ptr = NULL; n->data.str->len = 0; } n->data.str->ptr = S_ALLOC_N( char, len + 1 ); n->data.str->len = len; n->data.str->style = style; memcpy( n->data.str->ptr, str, len ); n->data.str->ptr[len] = '\0'; } void syck_str_blow_away_commas( SyckNode *n ) { char *go, *end; go = n->data.str->ptr; end = go + n->data.str->len; while ( *(++go) != '\0' ) { if ( *go == ',' ) { n->data.str->len -= 1; memmove( go, go + 1, end - go ); end -= 1; } } } char * syck_str_read( SyckNode *n ) { ASSERT( n != NULL ); return n->data.str->ptr; } SyckNode * syck_new_map( SYMID key, SYMID value ) { SyckNode *n; n = syck_alloc_map(); syck_map_add( n, key, value ); return n; } void syck_map_empty( SyckNode *n ) { struct SyckMap *m; ASSERT( n != NULL ); ASSERT( n->data.list != NULL ); S_FREE( n->data.pairs->keys ); S_FREE( n->data.pairs->values ); m = n->data.pairs; m->idx = 0; m->capa = ALLOC_CT; m->keys = S_ALLOC_N( SYMID, m->capa ); m->values = S_ALLOC_N( SYMID, m->capa ); } void syck_map_add( SyckNode *map, SYMID key, SYMID 
value ) { struct SyckMap *m; long idx; ASSERT( map != NULL ); ASSERT( map->data.pairs != NULL ); m = map->data.pairs; idx = m->idx; m->idx += 1; if ( m->idx > m->capa ) { m->capa += ALLOC_CT; S_REALLOC_N( m->keys, SYMID, m->capa ); S_REALLOC_N( m->values, SYMID, m->capa ); } m->keys[idx] = key; m->values[idx] = value; } void syck_map_update( SyckNode *map1, SyckNode *map2 ) { struct SyckMap *m1, *m2; long new_idx, new_capa; ASSERT( map1 != NULL ); ASSERT( map2 != NULL ); m1 = map1->data.pairs; m2 = map2->data.pairs; if ( m2->idx < 1 ) return; new_idx = m1->idx; new_idx += m2->idx; new_capa = m1->capa; while ( new_idx > new_capa ) { new_capa += ALLOC_CT; } if ( new_capa > m1->capa ) { m1->capa = new_capa; S_REALLOC_N( m1->keys, SYMID, m1->capa ); S_REALLOC_N( m1->values, SYMID, m1->capa ); } for ( new_idx = 0; new_idx < m2->idx; m1->idx++, new_idx++ ) { m1->keys[m1->idx] = m2->keys[new_idx]; m1->values[m1->idx] = m2->values[new_idx]; } } long syck_map_count( SyckNode *map ) { ASSERT( map != NULL ); ASSERT( map->data.pairs != NULL ); return map->data.pairs->idx; } void syck_map_assign( SyckNode *map, enum map_part p, long idx, SYMID id ) { struct SyckMap *m; ASSERT( map != NULL ); m = map->data.pairs; ASSERT( m != NULL ); if ( p == map_key ) { m->keys[idx] = id; } else { m->values[idx] = id; } } SYMID syck_map_read( SyckNode *map, enum map_part p, long idx ) { struct SyckMap *m; ASSERT( map != NULL ); m = map->data.pairs; ASSERT( m != NULL ); if ( p == map_key ) { return m->keys[idx]; } else { return m->values[idx]; } } SyckNode * syck_new_seq( SYMID value ) { SyckNode *n; n = syck_alloc_seq(); syck_seq_add( n, value ); return n; } void syck_seq_empty( SyckNode *n ) { struct SyckSeq *s; ASSERT( n != NULL ); ASSERT( n->data.list != NULL ); S_FREE( n->data.list->items ); s = n->data.list; s->idx = 0; s->capa = ALLOC_CT; s->items = S_ALLOC_N( SYMID, s->capa ); } void syck_seq_add( SyckNode *arr, SYMID value ) { struct SyckSeq *s; long idx; ASSERT( arr != NULL ); ASSERT( arr->data.list != NULL ); s = arr->data.list; idx = s->idx; s->idx += 1; if ( s->idx > s->capa ) { s->capa += ALLOC_CT; S_REALLOC_N( s->items, SYMID, s->capa ); } s->items[idx] = value; } long syck_seq_count( SyckNode *seq ) { ASSERT( seq != NULL ); ASSERT( seq->data.list != NULL ); return seq->data.list->idx; } void syck_seq_assign( SyckNode *seq, long idx, SYMID id ) { struct SyckSeq *s; ASSERT( map != NULL ); s = seq->data.list; ASSERT( m != NULL ); s->items[idx] = id; } SYMID syck_seq_read( SyckNode *seq, long idx ) { struct SyckSeq *s; ASSERT( seq != NULL ); s = seq->data.list; ASSERT( s != NULL ); return s->items[idx]; } void syck_free_members( SyckNode *n ) { if ( n == NULL ) return; switch ( n->kind ) { case syck_str_kind: if ( n->data.str != NULL ) { S_FREE( n->data.str->ptr ); n->data.str->ptr = NULL; n->data.str->len = 0; S_FREE( n->data.str ); n->data.str = NULL; } break; case syck_seq_kind: if ( n->data.list != NULL ) { S_FREE( n->data.list->items ); S_FREE( n->data.list ); n->data.list = NULL; } break; case syck_map_kind: if ( n->data.pairs != NULL ) { S_FREE( n->data.pairs->keys ); S_FREE( n->data.pairs->values ); S_FREE( n->data.pairs ); n->data.pairs = NULL; } break; } } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/ext/syck/syck.c0000644000000000000000000002356511672453175024544 0ustar rootroot/* * syck.c * * $Author: why $ * $Date: 2005/01/01 02:06:25 $ * * Copyright (C) 2003 why the lucky stiff */ #include #include #include "syck.h" void syck_parser_pop_level( SyckParser * ); /* * Custom assert */ 
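/*
 * Usage sketch -- not part of upstream syck, and compiled out behind the
 * hypothetical SYCK_USAGE_SKETCH guard.  It only illustrates how the parser
 * API defined in this file (syck_new_parser, syck_parser_str_auto,
 * syck_parser_handler, syck_parse, syck_free_parser) is meant to be driven:
 * install a SyckNodeHandler, feed a string, and read back the root SYMID.
 * The sketch_* names below are invented for the example.
 */
#ifdef SYCK_USAGE_SKETCH
static SYMID
sketch_node_handler( SyckParser *p, SyckNode *n )
{
    /* Minimal handler contract: return an id the parser can use to refer
     * to this node from enclosing sequences and mappings.  Real handlers
     * (for instance syck_yaml2byte_handler elsewhere in this package)
     * convert the SyckNode into their own representation here rather than
     * holding on to the raw node pointer. */
    return syck_add_sym( p, (char *)n );
}

static int
sketch_parse_string( char *yaml_str )
{
    char *data = NULL;
    SYMID root;
    SyckParser *p = syck_new_parser();

    syck_parser_str_auto( p, yaml_str, NULL );  /* default string reader */
    syck_parser_handler( p, sketch_node_handler );
    syck_parser_implicit_typing( p, 1 );        /* explicit here, but these   */
    syck_parser_taguri_expansion( p, 0 );       /* match syck_new_parser()    */

    root = syck_parse( p );                     /* SYMID of the root node,    */
                                                /* 0 when nothing was parsed  */
    if ( root != 0 )
        syck_lookup_sym( p, root, &data );      /* ids are only meaningful    */
                                                /* while the parser is alive  */
    syck_free_parser( p );
    return data != NULL;
}
#endif /* SYCK_USAGE_SKETCH */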
void syck_assert( char *file_name, unsigned line_num ) { fflush( NULL ); fprintf( stderr, "\nAssertion failed: %s, line %u\n", file_name, line_num ); fflush( stderr ); abort(); } /* * Allocates and copies a string */ char * syck_strndup( char *buf, long len ) { char *new = S_ALLOC_N( char, len + 1 ); S_MEMZERO( new, char, len + 1 ); S_MEMCPY( new, buf, char, len ); return new; } /* * Default FILE IO function */ long syck_io_file_read( char *buf, SyckIoFile *file, long max_size, long skip ) { long len = 0; ASSERT( file != NULL ); max_size -= skip; len = fread( buf + skip, sizeof( char ), max_size, file->ptr ); len += skip; buf[len] = '\0'; return len; } /* * Default string IO function */ long syck_io_str_read( char *buf, SyckIoStr *str, long max_size, long skip ) { char *beg; long len = 0; ASSERT( str != NULL ); beg = str->ptr; if ( max_size >= 0 ) { max_size -= skip; if ( max_size <= 0 ) max_size = 0; else str->ptr += max_size; if ( str->ptr > str->end ) { str->ptr = str->end; } } else { /* Use exact string length */ while ( str->ptr < str->end ) { if (*(str->ptr++) == '\n') break; } } if ( beg < str->ptr ) { len = ( str->ptr - beg ); S_MEMCPY( buf + skip, beg, char, len ); } len += skip; buf[len] = '\0'; return len; } void syck_parser_reset_levels( SyckParser *p ) { while ( p->lvl_idx > 1 ) { syck_parser_pop_level( p ); } if ( p->lvl_idx < 1 ) { p->lvl_idx = 1; p->levels[0].spaces = -1; p->levels[0].ncount = 0; p->levels[0].domain = syck_strndup( "", 0 ); } p->levels[0].status = syck_lvl_header; } void syck_parser_reset_cursor( SyckParser *p ) { if ( p->buffer == NULL ) { p->buffer = S_ALLOC_N( char, p->bufsize ); S_MEMZERO( p->buffer, char, p->bufsize ); } p->buffer[0] = '\0'; p->cursor = NULL; p->lineptr = NULL; p->linectptr = NULL; p->token = NULL; p->toktmp = NULL; p->marker = NULL; p->limit = NULL; p->root = 0; p->root_on_error = 0; p->linect = 0; p->eof = 0; p->last_token = 0; p->force_token = 0; } /* * Value to return on a parse error */ void syck_parser_set_root_on_error( SyckParser *p, SYMID roer ) { p->root_on_error = roer; } /* * Allocate the parser */ SyckParser * syck_new_parser() { SyckParser *p; p = S_ALLOC( SyckParser ); S_MEMZERO( p, SyckParser, 1 ); p->lvl_capa = ALLOC_CT; p->levels = S_ALLOC_N( SyckLevel, p->lvl_capa ); p->input_type = syck_yaml_utf8; p->io_type = syck_io_str; p->io.str = NULL; p->syms = NULL; p->anchors = NULL; p->bad_anchors = NULL; p->implicit_typing = 1; p->taguri_expansion = 0; p->bufsize = SYCK_BUFFERSIZE; p->buffer = NULL; p->lvl_idx = 0; syck_parser_reset_levels( p ); return p; } int syck_add_sym( SyckParser *p, char *data ) { SYMID id = 0; if ( p->syms == NULL ) { p->syms = st_init_numtable(); } id = p->syms->num_entries + 1; st_insert( p->syms, id, (st_data_t)data ); return id; } int syck_lookup_sym( SyckParser *p, SYMID id, char **data ) { if ( p->syms == NULL ) return 0; return st_lookup( p->syms, id, (st_data_t *)data ); } int syck_st_free_nodes( char *key, SyckNode *n, char *arg ) { if ( n != (void *)1 ) syck_free_node( n ); n = NULL; return ST_CONTINUE; } void syck_st_free( SyckParser *p ) { /* * Free the anchor tables */ if ( p->anchors != NULL ) { st_foreach( p->anchors, syck_st_free_nodes, 0 ); st_free_table( p->anchors ); p->anchors = NULL; } if ( p->bad_anchors != NULL ) { st_foreach( p->bad_anchors, syck_st_free_nodes, 0 ); st_free_table( p->bad_anchors ); p->bad_anchors = NULL; } } void syck_free_parser( SyckParser *p ) { /* * Free the adhoc symbol table */ if ( p->syms != NULL ) { st_free_table( p->syms ); p->syms = NULL; } /* * 
Free tables, levels */ syck_st_free( p ); syck_parser_reset_levels( p ); S_FREE( p->levels[0].domain ); S_FREE( p->levels ); if ( p->buffer != NULL ) { S_FREE( p->buffer ); } free_any_io( p ); S_FREE( p ); } void syck_parser_handler( SyckParser *p, SyckNodeHandler hdlr ) { ASSERT( p != NULL ); p->handler = hdlr; } void syck_parser_implicit_typing( SyckParser *p, int flag ) { p->implicit_typing = ( flag == 0 ? 0 : 1 ); } void syck_parser_taguri_expansion( SyckParser *p, int flag ) { p->taguri_expansion = ( flag == 0 ? 0 : 1 ); } void syck_parser_error_handler( SyckParser *p, SyckErrorHandler hdlr ) { ASSERT( p != NULL ); p->error_handler = hdlr; } void syck_parser_bad_anchor_handler( SyckParser *p, SyckBadAnchorHandler hdlr ) { ASSERT( p != NULL ); p->bad_anchor_handler = hdlr; } void syck_parser_set_input_type( SyckParser *p, enum syck_parser_input input_type ) { ASSERT( p != NULL ); p->input_type = input_type; } void syck_parser_file( SyckParser *p, FILE *fp, SyckIoFileRead read ) { ASSERT( p != NULL ); free_any_io( p ); syck_parser_reset_cursor( p ); p->io_type = syck_io_file; p->io.file = S_ALLOC( SyckIoFile ); p->io.file->ptr = fp; if ( read != NULL ) { p->io.file->read = read; } else { p->io.file->read = syck_io_file_read; } } void syck_parser_str( SyckParser *p, char *ptr, long len, SyckIoStrRead read ) { ASSERT( p != NULL ); free_any_io( p ); syck_parser_reset_cursor( p ); p->io_type = syck_io_str; p->io.str = S_ALLOC( SyckIoStr ); p->io.str->beg = ptr; p->io.str->ptr = ptr; p->io.str->end = ptr + len; if ( read != NULL ) { p->io.str->read = read; } else { p->io.str->read = syck_io_str_read; } } void syck_parser_str_auto( SyckParser *p, char *ptr, SyckIoStrRead read ) { syck_parser_str( p, ptr, strlen( ptr ), read ); } SyckLevel * syck_parser_current_level( SyckParser *p ) { return &p->levels[p->lvl_idx-1]; } void syck_parser_pop_level( SyckParser *p ) { ASSERT( p != NULL ); /* The root level should never be popped */ if ( p->lvl_idx <= 1 ) return; p->lvl_idx -= 1; free( p->levels[p->lvl_idx].domain ); } void syck_parser_add_level( SyckParser *p, int len, enum syck_level_status status ) { ASSERT( p != NULL ); if ( p->lvl_idx + 1 > p->lvl_capa ) { p->lvl_capa += ALLOC_CT; S_REALLOC_N( p->levels, SyckLevel, p->lvl_capa ); } ASSERT( len > p->levels[p->lvl_idx-1].spaces ); p->levels[p->lvl_idx].spaces = len; p->levels[p->lvl_idx].ncount = 0; p->levels[p->lvl_idx].domain = syck_strndup( p->levels[p->lvl_idx-1].domain, strlen( p->levels[p->lvl_idx-1].domain ) ); p->levels[p->lvl_idx].status = status; p->lvl_idx += 1; } void free_any_io( SyckParser *p ) { ASSERT( p != NULL ); switch ( p->io_type ) { case syck_io_str: if ( p->io.str != NULL ) { S_FREE( p->io.str ); p->io.str = NULL; } break; case syck_io_file: if ( p->io.file != NULL ) { S_FREE( p->io.file ); p->io.file = NULL; } break; } } long syck_move_tokens( SyckParser *p ) { long count, skip; ASSERT( p->buffer != NULL ); if ( p->token == NULL ) return 0; skip = p->limit - p->token; if ( skip < 1 ) return 0; if ( ( count = p->token - p->buffer ) ) { S_MEMMOVE( p->buffer, p->token, char, skip ); p->token = p->buffer; p->marker -= count; p->cursor -= count; p->toktmp -= count; p->limit -= count; p->lineptr -= count; p->linectptr -= count; } return skip; } void syck_check_limit( SyckParser *p, long len ) { if ( p->cursor == NULL ) { p->cursor = p->buffer; p->lineptr = p->buffer; p->linectptr = p->buffer; p->marker = p->buffer; } p->limit = p->buffer + len; } long syck_parser_read( SyckParser *p ) { long len = 0; long skip = 0; ASSERT( p 
!= NULL ); switch ( p->io_type ) { case syck_io_str: skip = syck_move_tokens( p ); len = (p->io.str->read)( p->buffer, p->io.str, SYCK_BUFFERSIZE - 1, skip ); break; case syck_io_file: skip = syck_move_tokens( p ); len = (p->io.file->read)( p->buffer, p->io.file, SYCK_BUFFERSIZE - 1, skip ); break; } syck_check_limit( p, len ); return len; } long syck_parser_readlen( SyckParser *p, long max_size ) { long len = 0; long skip = 0; ASSERT( p != NULL ); switch ( p->io_type ) { case syck_io_str: skip = syck_move_tokens( p ); len = (p->io.str->read)( p->buffer, p->io.str, max_size, skip ); break; case syck_io_file: skip = syck_move_tokens( p ); len = (p->io.file->read)( p->buffer, p->io.file, max_size, skip ); break; } syck_check_limit( p, len ); return len; } SYMID syck_parse( SyckParser *p ) { ASSERT( p != NULL ); syck_st_free( p ); syck_parser_reset_levels( p ); syckparse( p ); return p->root; } void syck_default_error_handler( SyckParser *p, char *msg ) { printf( "Error at [Line %d, Col %d]: %s\n", p->linect, p->cursor - p->lineptr, msg ); } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/ext/syck/gram.h0000644000000000000000000000432611672453175024520 0ustar rootroot/* A Bison parser, made by GNU Bison 1.875d. */ /* Skeleton parser for Yacc-like parsing with Bison, Copyright (C) 1984, 1989, 1990, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */ /* As a special exception, when this file is copied by Bison into a Bison output file, you may use that output file without restriction. This special exception was added by the Free Software Foundation in version 1.24 of Bison. */ /* Tokens. */ #ifndef YYTOKENTYPE # define YYTOKENTYPE /* Put the tokens into the symbol table, so that GDB and other debuggers know about them. */ enum yytokentype { YAML_ANCHOR = 258, YAML_ALIAS = 259, YAML_TRANSFER = 260, YAML_TAGURI = 261, YAML_ITRANSFER = 262, YAML_WORD = 263, YAML_PLAIN = 264, YAML_BLOCK = 265, YAML_DOCSEP = 266, YAML_IOPEN = 267, YAML_INDENT = 268, YAML_IEND = 269 }; #endif #define YAML_ANCHOR 258 #define YAML_ALIAS 259 #define YAML_TRANSFER 260 #define YAML_TAGURI 261 #define YAML_ITRANSFER 262 #define YAML_WORD 263 #define YAML_PLAIN 264 #define YAML_BLOCK 265 #define YAML_DOCSEP 266 #define YAML_IOPEN 267 #define YAML_INDENT 268 #define YAML_IEND 269 #if ! defined (YYSTYPE) && ! defined (YYSTYPE_IS_DECLARED) #line 35 "gram.y" typedef union YYSTYPE { SYMID nodeId; SyckNode *nodeData; char *name; } YYSTYPE; /* Line 1285 of yacc.c. 
*/ #line 71 "gram.h" # define yystype YYSTYPE /* obsolescent; will be withdrawn */ # define YYSTYPE_IS_DECLARED 1 # define YYSTYPE_IS_TRIVIAL 1 #endif ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/ext/syck/extconf.rb0000644000000000000000000000053311672453175025410 0ustar rootrootrequire "../../../../../../../lib/mkrf" [ 'emitter.c', 'gram.c', 'gram.h', 'handler.c', 'node.c', 'syck.c', 'syck.h', 'token.c', 'bytecode.c', 'implicit.c', 'yaml2byte.c', 'yamlbyte.h' ].each do |codefile| `cp #{File::dirname $0}/../../../../lib/#{codefile} #{codefile}` end Mkrf::Generator.new('syck') do |g| g.include_header("st.h") end ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/ext/syck/yaml2byte.c0000644000000000000000000001645411672453175025502 0ustar rootroot/* * yaml2byte.c * * $Author: why $ * $Date: 2004/08/02 17:32:35 $ * * Copyright (C) 2003 why the lucky stiff, clark evans * * WARNING WARNING WARNING --- THIS IS *NOT JUST* PLAYING * ANYMORE! -- WHY HAS EMBRACED THIS AS THE REAL THING! */ #include #include #define YAMLBYTE_UTF8 #include "yamlbyte.h" #include #define TRACE0(a) \ do { printf(a); printf("\n"); fflush(stdout); } while(0) #define TRACE1(a,b) \ do { printf(a,b); printf("\n"); fflush(stdout); } while(0) #define TRACE2(a,b,c) \ do { printf(a,b,c); printf("\n"); fflush(stdout); } while(0) #define TRACE3(a,b,c,d) \ do { printf(a,b,c,d); printf("\n"); fflush(stdout); } while(0) /* Reinvent the wheel... */ #define CHUNKSIZE 64 #define HASH ((long)0xCAFECAFE) typedef struct { long hash; char *buffer; long length; long remaining; int printed; } bytestring_t; bytestring_t *bytestring_alloc() { bytestring_t *ret; /*TRACE0("bytestring_alloc()");*/ ret = S_ALLOC(bytestring_t); ret->hash = HASH; ret->length = CHUNKSIZE; ret->remaining = ret->length; ret->buffer = S_ALLOC_N(char, ret->length + 1 ); ret->buffer[0] = 0; ret->printed = 0; return ret; } void bytestring_append(bytestring_t *str, char code, char *start, char *finish) { long grow; long length = 2; /* CODE + LF */ char *curr; assert(str && HASH == str->hash); /*TRACE0("bytestring_append()");*/ if(start) { if(!finish) finish = start + strlen(start); length += (finish-start); } if(length > str->remaining) { grow = (length - str->remaining) + CHUNKSIZE; str->remaining += grow; str->length += grow; str->buffer = S_REALLOC_N( str->buffer, char, str->length + 1 ); assert(str->buffer); } curr = str->buffer + (str->length - str->remaining); *curr = code; curr += 1; if(start) while(start < finish) *curr ++ = *start ++; *curr = '\n'; curr += 1; *curr = 0; str->remaining = str->remaining - length; assert( (str->buffer + str->length) - str->remaining ); } void bytestring_extend(bytestring_t *str, bytestring_t *ext) { char *from; char *curr; char *stop; long grow; long length; assert(str && HASH == str->hash); assert(ext && HASH == ext->hash); if(ext->printed) { assert(ext->buffer[0] ==YAMLBYTE_ANCHOR); curr = ext->buffer; while( '\n' != *curr) curr++; bytestring_append(str, YAMLBYTE_ALIAS, ext->buffer + 1, curr); } else { ext->printed = 1; length = (ext->length - ext->remaining); if(length > str->remaining) { grow = (length - str->remaining) + CHUNKSIZE; str->remaining += grow; str->length += grow; str->buffer = S_REALLOC_N( str->buffer, char, str->length + 1 ); } curr = str->buffer + (str->length - str->remaining); from = ext->buffer; stop = ext->buffer + length; while( from < stop ) *curr ++ = *from ++; *curr = 0; str->remaining = str->remaining - length; assert( (str->buffer + str->length) - str->remaining ); } } /* convert 
SyckNode into yamlbyte_buffer_t objects */ SYMID syck_yaml2byte_handler(p, n) SyckParser *p; SyckNode *n; { SYMID oid; long i; char ch; char nextcode; char *start; char *current; char *finish; bytestring_t *val = NULL; bytestring_t *sav = NULL; /*TRACE0("syck_yaml2byte_handler()");*/ val = bytestring_alloc(); if(n->anchor) bytestring_append(val,YAMLBYTE_ANCHOR, n->anchor, NULL); if ( n->type_id ) { if ( p->taguri_expansion ) { bytestring_append(val,YAMLBYTE_TRANSFER, n->type_id, NULL); } else { char *type_tag = S_ALLOC_N( char, strlen( n->type_id ) + 1 ); type_tag[0] = '\0'; strcat( type_tag, "!" ); strcat( type_tag, n->type_id ); bytestring_append( val, YAMLBYTE_TRANSFER, type_tag, NULL); } } switch (n->kind) { case syck_str_kind: nextcode = YAMLBYTE_SCALAR; start = n->data.str->ptr; finish = start + n->data.str->len - 1; current = start; /*TRACE2("SCALAR: %s %d", start, n->data.str->len); */ while(1) { ch = *current; if('\n' == ch || 0 == ch || current > finish) { if(current >= start) { bytestring_append(val, nextcode, start, current); nextcode = YAMLBYTE_CONTINUE; } start = current + 1; if(current > finish) { break; } else if('\n' == ch ) { bytestring_append(val,YAMLBYTE_NEWLINE,NULL,NULL); } else if(0 == ch) { bytestring_append(val,YAMLBYTE_NULLCHAR,NULL,NULL); } else { assert("oops"); } } current += 1; } break; case syck_seq_kind: bytestring_append(val,YAMLBYTE_SEQUENCE,NULL,NULL); for ( i = 0; i < n->data.list->idx; i++ ) { oid = syck_seq_read( n, i ); syck_lookup_sym( p, oid, (char **)&sav ); bytestring_extend(val, sav); } bytestring_append(val,YAMLBYTE_END_BRANCH,NULL,NULL); break; case syck_map_kind: bytestring_append(val,YAMLBYTE_MAPPING,NULL,NULL); for ( i = 0; i < n->data.pairs->idx; i++ ) { oid = syck_map_read( n, map_key, i ); syck_lookup_sym( p, oid, (char **)&sav ); bytestring_extend(val, sav); oid = syck_map_read( n, map_value, i ); syck_lookup_sym( p, oid, (char **)&sav ); bytestring_extend(val, sav); } bytestring_append(val,YAMLBYTE_END_BRANCH,NULL,NULL); break; } oid = syck_add_sym( p, (char *) val ); /*TRACE1("Saving: %s", val->buffer );*/ return oid; } char * syck_yaml2byte(char *yamlstr) { SYMID oid; char *ret; bytestring_t *sav; SyckParser *parser = syck_new_parser(); syck_parser_str_auto( parser, yamlstr, NULL ); syck_parser_handler( parser, syck_yaml2byte_handler ); syck_parser_error_handler( parser, NULL ); syck_parser_implicit_typing( parser, 1 ); syck_parser_taguri_expansion( parser, 1 ); oid = syck_parse( parser ); syck_lookup_sym( parser, oid, (char **)&sav ); ret = S_ALLOC_N( char, strlen( sav->buffer ) + 3 ); ret[0] = '\0'; strcat( ret, "D\n" ); strcat( ret, sav->buffer ); syck_free_parser( parser ); return ret; } #ifdef TEST_YBEXT #include int main() { char *yaml = "test: 1\nand: \"with new\\nline\\n\"\nalso: &3 three\nmore: *3"; printf("--- # YAML \n"); printf(yaml); printf("\n...\n"); printf(syck_yaml2byte(yaml)); return 0; } #endif ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/ext/syck/MANIFEST0000644000000000000000000000001211672453175024536 0ustar rootrootrubyext.c ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/ext/syck/emitter.c0000644000000000000000000010315011672453175025231 0ustar rootroot/* * emitter.c * * $Author: why $ * $Date: 2005/05/19 06:07:42 $ * * Copyright (C) 2003 why the lucky stiff * * All Base64 code from Ruby's pack.c. 
* Ruby is Copyright (C) 1993-2003 Yukihiro Matsumoto */ #include #include #include "syck.h" #define DEFAULT_ANCHOR_FORMAT "id%03d" const char hex_table[] = "0123456789ABCDEF"; static char b64_table[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"; /* * Built-in base64 (from Ruby's pack.c) */ char * syck_base64enc( char *s, long len ) { long i = 0; int padding = '='; char *buff = S_ALLOC_N(char, len * 4 / 3 + 6); while (len >= 3) { buff[i++] = b64_table[077 & (*s >> 2)]; buff[i++] = b64_table[077 & (((*s << 4) & 060) | ((s[1] >> 4) & 017))]; buff[i++] = b64_table[077 & (((s[1] << 2) & 074) | ((s[2] >> 6) & 03))]; buff[i++] = b64_table[077 & s[2]]; s += 3; len -= 3; } if (len == 2) { buff[i++] = b64_table[077 & (*s >> 2)]; buff[i++] = b64_table[077 & (((*s << 4) & 060) | ((s[1] >> 4) & 017))]; buff[i++] = b64_table[077 & (((s[1] << 2) & 074) | (('\0' >> 6) & 03))]; buff[i++] = padding; } else if (len == 1) { buff[i++] = b64_table[077 & (*s >> 2)]; buff[i++] = b64_table[077 & (((*s << 4) & 060) | (('\0' >> 4) & 017))]; buff[i++] = padding; buff[i++] = padding; } buff[i++] = '\n'; return buff; } char * syck_base64dec( char *s, long len ) { int a = -1,b = -1,c = 0,d; static int first = 1; static int b64_xtable[256]; char *ptr = syck_strndup( s, len ); char *end = ptr; char *send = s + len; if (first) { int i; first = 0; for (i = 0; i < 256; i++) { b64_xtable[i] = -1; } for (i = 0; i < 64; i++) { b64_xtable[(int)b64_table[i]] = i; } } while (s < send) { while (s[0] == '\r' || s[0] == '\n') { s++; } if ((a = b64_xtable[(int)s[0]]) == -1) break; if ((b = b64_xtable[(int)s[1]]) == -1) break; if ((c = b64_xtable[(int)s[2]]) == -1) break; if ((d = b64_xtable[(int)s[3]]) == -1) break; *end++ = a << 2 | b >> 4; *end++ = b << 4 | c >> 2; *end++ = c << 6 | d; s += 4; } if (a != -1 && b != -1) { if (s + 2 < send && s[2] == '=') *end++ = a << 2 | b >> 4; if (c != -1 && s + 3 < send && s[3] == '=') { *end++ = a << 2 | b >> 4; *end++ = b << 4 | c >> 2; } } *end = '\0'; /*RSTRING(buf)->len = ptr - RSTRING(buf)->ptr;*/ return ptr; } /* * Allocate an emitter */ SyckEmitter * syck_new_emitter() { SyckEmitter *e; e = S_ALLOC( SyckEmitter ); e->headless = 0; e->use_header = 0; e->use_version = 0; e->sort_keys = 0; e->anchor_format = NULL; e->explicit_typing = 0; e->best_width = 80; e->style = scalar_none; e->stage = doc_open; e->indent = 2; e->level = -1; e->anchors = NULL; e->markers = NULL; e->anchored = NULL; e->bufsize = SYCK_BUFFERSIZE; e->buffer = NULL; e->marker = NULL; e->bufpos = 0; e->emitter_handler = NULL; e->output_handler = NULL; e->lvl_idx = 0; e->lvl_capa = ALLOC_CT; e->levels = S_ALLOC_N( SyckLevel, e->lvl_capa ); syck_emitter_reset_levels( e ); e->bonus = NULL; return e; } int syck_st_free_anchors( char *key, char *name, char *arg ) { S_FREE( name ); return ST_CONTINUE; } void syck_emitter_st_free( SyckEmitter *e ) { /* * Free the anchor tables */ if ( e->anchors != NULL ) { st_foreach( e->anchors, syck_st_free_anchors, 0 ); st_free_table( e->anchors ); e->anchors = NULL; } if ( e->anchored != NULL ) { st_free_table( e->anchored ); e->anchored = NULL; } /* * Free the markers tables */ if ( e->markers != NULL ) { st_free_table( e->markers ); e->markers = NULL; } } SyckLevel * syck_emitter_current_level( SyckEmitter *e ) { return &e->levels[e->lvl_idx-1]; } SyckLevel * syck_emitter_parent_level( SyckEmitter *e ) { return &e->levels[e->lvl_idx-2]; } void syck_emitter_pop_level( SyckEmitter *e ) { ASSERT( e != NULL ); /* The root level should never be popped */ if ( e->lvl_idx 
<= 1 ) return; e->lvl_idx -= 1; free( e->levels[e->lvl_idx].domain ); } void syck_emitter_add_level( SyckEmitter *e, int len, enum syck_level_status status ) { ASSERT( e != NULL ); if ( e->lvl_idx + 1 > e->lvl_capa ) { e->lvl_capa += ALLOC_CT; S_REALLOC_N( e->levels, SyckLevel, e->lvl_capa ); } ASSERT( len > e->levels[e->lvl_idx-1].spaces ); e->levels[e->lvl_idx].spaces = len; e->levels[e->lvl_idx].ncount = 0; e->levels[e->lvl_idx].domain = syck_strndup( e->levels[e->lvl_idx-1].domain, strlen( e->levels[e->lvl_idx-1].domain ) ); e->levels[e->lvl_idx].status = status; e->levels[e->lvl_idx].anctag = 0; e->lvl_idx += 1; } void syck_emitter_reset_levels( SyckEmitter *e ) { while ( e->lvl_idx > 1 ) { syck_emitter_pop_level( e ); } if ( e->lvl_idx < 1 ) { e->lvl_idx = 1; e->levels[0].spaces = -1; e->levels[0].ncount = 0; e->levels[0].domain = syck_strndup( "", 0 ); e->levels[0].anctag = 0; } e->levels[0].status = syck_lvl_header; } void syck_emitter_handler( SyckEmitter *e, SyckEmitterHandler hdlr ) { e->emitter_handler = hdlr; } void syck_output_handler( SyckEmitter *e, SyckOutputHandler hdlr ) { e->output_handler = hdlr; } void syck_free_emitter( SyckEmitter *e ) { /* * Free tables */ syck_emitter_st_free( e ); syck_emitter_reset_levels( e ); S_FREE( e->levels[0].domain ); S_FREE( e->levels ); if ( e->buffer != NULL ) { S_FREE( e->buffer ); } S_FREE( e ); } void syck_emitter_clear( SyckEmitter *e ) { if ( e->buffer == NULL ) { e->buffer = S_ALLOC_N( char, e->bufsize ); S_MEMZERO( e->buffer, char, e->bufsize ); } e->buffer[0] = '\0'; e->marker = e->buffer; e->bufpos = 0; } /* * Raw write to the emitter buffer. */ void syck_emitter_write( SyckEmitter *e, char *str, long len ) { long at; ASSERT( str != NULL ) if ( e->buffer == NULL ) { syck_emitter_clear( e ); } /* * Flush if at end of buffer */ at = e->marker - e->buffer; if ( len + at >= e->bufsize ) { syck_emitter_flush( e, 0 ); for (;;) { long rest = e->bufsize - (e->marker - e->buffer); if (len <= rest) break; S_MEMCPY( e->marker, str, char, rest ); e->marker += rest; str += rest; len -= rest; syck_emitter_flush( e, 0 ); } } /* * Write to buffer */ S_MEMCPY( e->marker, str, char, len ); e->marker += len; e->marker[0] = '\0'; } /* * Write a chunk of data out. */ void syck_emitter_flush( SyckEmitter *e, long check_room ) { /* * Check for enough space in the buffer for check_room length. */ if ( check_room > 0 ) { if ( e->bufsize > ( e->marker - e->buffer ) + check_room ) { return; } } else { check_room = e->bufsize; } /* * Determine headers. */ if ( ( e->stage == doc_open && ( e->headless == 0 || e->use_header == 1 ) ) || e->stage == doc_need_header ) { if ( e->use_version == 1 ) { char *header = S_ALLOC_N( char, 64 ); S_MEMZERO( header, char, 64 ); sprintf( header, "--- %%YAML:%d.%d ", SYCK_YAML_MAJOR, SYCK_YAML_MINOR ); (e->output_handler)( e, header, strlen( header ) ); S_FREE( header ); } else { (e->output_handler)( e, "--- ", 4 ); } e->stage = doc_processing; } /* * Commit buffer. */ if ( check_room > e->marker - e->buffer ) { check_room = e->marker - e->buffer; } (e->output_handler)( e, e->buffer, check_room ); e->bufpos += check_room; e->marker -= check_room; } /* * Start emitting from the given node, check for anchoring and then * issue the callback to the emitter handler. 
*/ void syck_emit( SyckEmitter *e, st_data_t n ) { SYMID oid; char *anchor_name = NULL; int indent = 0, x = 0; SyckLevel *lvl = syck_emitter_current_level( e ); /* Add new level */ if ( lvl->spaces >= 0 ) { indent = lvl->spaces + e->indent; } syck_emitter_add_level( e, indent, syck_lvl_open ); lvl = syck_emitter_current_level( e ); /* Look for anchor */ if ( e->anchors != NULL && st_lookup( e->markers, n, (st_data_t *)&oid ) && st_lookup( e->anchors, (st_data_t)oid, (st_data_t *)&anchor_name ) ) { if ( e->anchored == NULL ) { e->anchored = st_init_numtable(); } if ( ! st_lookup( e->anchored, (st_data_t)anchor_name, (st_data_t *)&x ) ) { char *an = S_ALLOC_N( char, strlen( anchor_name ) + 3 ); sprintf( an, "&%s ", anchor_name ); syck_emitter_write( e, an, strlen( anchor_name ) + 2 ); free( an ); x = 1; st_insert( e->anchored, (st_data_t)anchor_name, (st_data_t)x ); lvl->anctag = 1; } else { char *an = S_ALLOC_N( char, strlen( anchor_name ) + 2 ); sprintf( an, "*%s", anchor_name ); syck_emitter_write( e, an, strlen( anchor_name ) + 1 ); free( an ); goto end_emit; } } (e->emitter_handler)( e, n ); /* Pop the level */ end_emit: syck_emitter_pop_level( e ); if ( e->lvl_idx == 1 ) { syck_emitter_write( e, "\n", 1 ); e->stage = doc_open; } } /* * Determine what tag needs to be written, based on the taguri of the node * and the implicit tag which would be assigned to this node. If a tag is * required, write the tag. */ void syck_emit_tag( SyckEmitter *e, char *tag, char *ignore ) { SyckLevel *lvl; if ( tag == NULL ) return; if ( ignore != NULL && syck_tagcmp( tag, ignore ) == 0 && e->explicit_typing == 0 ) return; lvl = syck_emitter_current_level( e ); /* implicit */ if ( strlen( tag ) == 0 ) { syck_emitter_write( e, "! ", 2 ); /* global types */ } else if ( strncmp( tag, "tag:", 4 ) == 0 ) { int taglen = strlen( tag ); syck_emitter_write( e, "!", 1 ); if ( strncmp( tag + 4, YAML_DOMAIN, strlen( YAML_DOMAIN ) ) == 0 ) { int skip = 4 + strlen( YAML_DOMAIN ) + 1; syck_emitter_write( e, tag + skip, taglen - skip ); } else { char *subd = tag + 4; while ( *subd != ':' && *subd != '\0' ) subd++; if ( *subd == ':' ) { if ( subd - tag > ( strlen( YAML_DOMAIN ) + 5 ) && strncmp( subd - strlen( YAML_DOMAIN ), YAML_DOMAIN, strlen( YAML_DOMAIN ) ) == 0 ) { syck_emitter_write( e, tag + 4, subd - strlen( YAML_DOMAIN ) - ( tag + 4 ) - 1 ); syck_emitter_write( e, "/", 1 ); syck_emitter_write( e, subd + 1, ( tag + taglen ) - ( subd + 1 ) ); } else { syck_emitter_write( e, tag + 4, subd - ( tag + 4 ) ); syck_emitter_write( e, "/", 1 ); syck_emitter_write( e, subd + 1, ( tag + taglen ) - ( subd + 1 ) ); } } else { /* TODO: Invalid tag (no colon after domain) */ return; } } syck_emitter_write( e, " ", 1 ); /* private types */ } else if ( strncmp( tag, "x-private:", 10 ) == 0 ) { syck_emitter_write( e, "!!", 2 ); syck_emitter_write( e, tag + 10, strlen( tag ) - 10 ); syck_emitter_write( e, " ", 1 ); } lvl->anctag = 1; } /* * Emit a newline and an appropriately spaced indent. */ void syck_emit_indent( SyckEmitter *e ) { int i; SyckLevel *lvl = syck_emitter_current_level( e ); if ( lvl->spaces >= 0 ) { char *spcs = S_ALLOC_N( char, lvl->spaces + 2 ); spcs[0] = '\n'; spcs[lvl->spaces + 1] = '\0'; for ( i = 0; i < lvl->spaces; i++ ) spcs[i+1] = ' '; syck_emitter_write( e, spcs, lvl->spaces + 1 ); free( spcs ); } } /* Clear the scan */ #define SCAN_NONE 0 /* All printable characters? */ #define SCAN_NONPRINT 1 /* Any indented lines? */ #define SCAN_INDENTED 2 /* Larger than the requested width? 
*/ #define SCAN_WIDE 4 /* Opens with whitespace? */ #define SCAN_WHITESTART 8 /* Contains a newline */ #define SCAN_NEWLINE 16 /* Contains a single quote */ #define SCAN_SINGLEQ 32 /* Contains a double quote */ #define SCAN_DOUBLEQ 64 /* Starts with a token */ #define SCAN_INDIC_S 128 /* Contains a flow indicator */ #define SCAN_INDIC_C 256 /* Ends without newlines */ #define SCAN_NONL_E 512 /* Ends with many newlines */ #define SCAN_MANYNL_E 1024 /* Contains flow map indicators */ #define SCAN_FLOWMAP 2048 /* Contains flow seq indicators */ #define SCAN_FLOWSEQ 4096 /* Contains a valid doc separator */ #define SCAN_DOCSEP 8192 /* * Basic printable test for LATIN-1 characters. */ int syck_scan_scalar( int req_width, char *cursor, long len ) { long i = 0, start = 0; int flags = SCAN_NONE; if ( len < 1 ) return flags; /* c-indicators from the spec */ if ( cursor[0] == '[' || cursor[0] == ']' || cursor[0] == '{' || cursor[0] == '}' || cursor[0] == '!' || cursor[0] == '*' || cursor[0] == '&' || cursor[0] == '|' || cursor[0] == '>' || cursor[0] == '\'' || cursor[0] == '"' || cursor[0] == '#' || cursor[0] == '%' || cursor[0] == '@' || cursor[0] == '&' ) { flags |= SCAN_INDIC_S; } if ( ( cursor[0] == '-' || cursor[0] == ':' || cursor[0] == '?' || cursor[0] == ',' ) && cursor[1] == ' ' ) { flags |= SCAN_INDIC_S; } /* ending newlines */ if ( cursor[len-1] != '\n' ) { flags |= SCAN_NONL_E; } else if ( len > 1 && cursor[len-2] == '\n' ) { flags |= SCAN_MANYNL_E; } /* opening doc sep */ if ( len >= 3 && strncmp( cursor, "---", 3 ) == 0 ) flags |= SCAN_DOCSEP; /* scan string */ for ( i = 0; i < len; i++ ) { if ( ! ( cursor[i] == 0x9 || cursor[i] == 0xA || cursor[i] == 0xD || ( cursor[i] >= 0x20 && cursor[i] <= 0x7E ) ) ) { flags |= SCAN_NONPRINT; } else if ( cursor[i] == '\n' ) { flags |= SCAN_NEWLINE; if ( len - i >= 3 && strncmp( &cursor[i+1], "---", 3 ) == 0 ) flags |= SCAN_DOCSEP; if ( cursor[i+1] == ' ' || cursor[i+1] == '\t' ) flags |= SCAN_INDENTED; if ( req_width > 0 && i - start > req_width ) flags |= SCAN_WIDE; start = i; } else if ( cursor[i] == '\'' ) { flags |= SCAN_SINGLEQ; } else if ( cursor[i] == '"' ) { flags |= SCAN_DOUBLEQ; } else if ( cursor[i] == ']' ) { flags |= SCAN_FLOWSEQ; } else if ( cursor[i] == '}' ) { flags |= SCAN_FLOWMAP; } /* remember, if plain collections get implemented, to add nb-plain-flow-char */ else if ( ( cursor[i] == ' ' && cursor[i+1] == '#' ) || ( cursor[i] == ':' && cursor[i+1] == ' ' ) ) { flags |= SCAN_INDIC_C; } else if ( cursor[i] == ',' && cursor[i+1] == ' ' ) { flags |= SCAN_FLOWMAP; flags |= SCAN_FLOWSEQ; } if ( i == 0 && ( cursor[i] == ' ' || cursor[i] == '\t' ) ) { flags |= SCAN_WHITESTART; } } /* printf( "---STR---\n%s\nFLAGS: %d\n", cursor, flags ); */ return flags; } /* * All scalars should be emitted through this function, which determines an appropriate style, * tag and indent. 
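 *
 * For example, an illustrative call for a short untagged string (`e'
 * is assumed to be an emitter whose handlers are already installed):
 *
 *   syck_emit_scalar( e, "tag:yaml.org,2002:str", scalar_none,
 *                     0, 0, 0, "Mark McGwire", 12 );
 *
 * With scalar_none the SCAN_* flags above pick the final style; a
 * force_indent or force_width of zero defers to the level indent and,
 * for folded output, to e->best_width.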
*/ void syck_emit_scalar( SyckEmitter *e, char *tag, enum scalar_style force_style, int force_indent, int force_width, char keep_nl, char *str, long len ) { enum scalar_style favor_style = scalar_literal; SyckLevel *parent = syck_emitter_parent_level( e ); SyckLevel *lvl = syck_emitter_current_level( e ); int scan; char *implicit; if ( str == NULL ) str = ""; /* No empty nulls as map keys */ if ( len == 0 && ( parent->status == syck_lvl_map || parent->status == syck_lvl_imap ) && parent->ncount % 2 == 1 && syck_tagcmp( tag, "tag:yaml.org,2002:null" ) == 0 ) { str = "~"; len = 1; } scan = syck_scan_scalar( force_width, str, len ); implicit = syck_match_implicit( str, len ); /* quote strings which default to implicits */ implicit = syck_taguri( YAML_DOMAIN, implicit, strlen( implicit ) ); if ( syck_tagcmp( tag, implicit ) != 0 && syck_tagcmp( tag, "tag:yaml.org,2002:str" ) == 0 ) { force_style = scalar_2quote; } else { syck_emit_tag( e, tag, implicit ); } S_FREE( implicit ); /* if still arbitrary, sniff a good block style. */ if ( force_style == scalar_none ) { if ( scan & SCAN_NEWLINE ) { force_style = scalar_literal; } else { force_style = scalar_plain; } } if ( e->style == scalar_fold ) { favor_style = scalar_fold; } /* Determine block style */ if ( scan & SCAN_NONPRINT ) { force_style = scalar_2quote; } else if ( scan & SCAN_WHITESTART ) { force_style = scalar_2quote; } else if ( force_style != scalar_fold && ( scan & SCAN_INDENTED ) ) { force_style = scalar_literal; } else if ( force_style == scalar_plain && ( scan & SCAN_NEWLINE ) ) { force_style = favor_style; } else if ( force_style == scalar_plain && parent->status == syck_lvl_iseq && ( scan & SCAN_FLOWSEQ ) ) { force_style = scalar_2quote; } else if ( force_style == scalar_plain && parent->status == syck_lvl_imap && ( scan & SCAN_FLOWMAP ) ) { force_style = scalar_2quote; /* } else if ( force_style == scalar_fold && ( ! 
( scan & SCAN_WIDE ) ) ) { force_style = scalar_literal; */ } else if ( force_style == scalar_plain && ( scan & SCAN_INDIC_S || scan & SCAN_INDIC_C ) ) { if ( scan & SCAN_NEWLINE ) { force_style = favor_style; } else { force_style = scalar_2quote; } } if ( force_indent > 0 ) { lvl->spaces = parent->spaces + force_indent; } else if ( scan & SCAN_DOCSEP ) { lvl->spaces = parent->spaces + e->indent; } /* For now, all ambiguous keys are going to be double-quoted */ if ( parent->status == syck_lvl_map && parent->ncount % 2 == 1 ) { if ( force_style != scalar_plain ) { force_style = scalar_2quote; } } /* If the parent is an inline, double quote anything complex */ if ( parent->status == syck_lvl_imap || parent->status == syck_lvl_iseq ) { if ( force_style != scalar_plain && force_style != scalar_1quote ) { force_style = scalar_2quote; } } /* Fix the ending newlines */ if ( scan & SCAN_NONL_E ) { keep_nl = NL_CHOMP; } else if ( scan & SCAN_MANYNL_E ) { keep_nl = NL_KEEP; } /* Write the text node */ switch ( force_style ) { case scalar_1quote: syck_emit_1quoted( e, force_width, str, len ); break; case scalar_2quote: syck_emit_2quoted( e, force_width, str, len ); break; case scalar_fold: syck_emit_folded( e, force_width, keep_nl, str, len ); break; case scalar_literal: syck_emit_literal( e, keep_nl, str, len ); break; case scalar_plain: syck_emitter_write( e, str, len ); break; } } void syck_emitter_escape( SyckEmitter *e, char *src, long len ) { int i; for( i = 0; i < len; i++ ) { if( (src[i] < 0x20) || (0x7E < src[i]) ) { syck_emitter_write( e, "\\", 1 ); if( '\0' == src[i] ) syck_emitter_write( e, "0", 1 ); else { syck_emitter_write( e, "x", 1 ); syck_emitter_write( e, (char *)hex_table + ((src[i] & 0xF0) >> 4), 1 ); syck_emitter_write( e, (char *)hex_table + (src[i] & 0x0F), 1 ); } } else { syck_emitter_write( e, src + i, 1 ); if( '\\' == src[i] ) syck_emitter_write( e, "\\", 1 ); } } } /* * Outputs a single-quoted block. */ void syck_emit_1quoted( SyckEmitter *e, int width, char *str, long len ) { char do_indent = 0; char *mark = str; char *start = str; char *end = str; syck_emitter_write( e, "'", 1 ); while ( mark < str + len ) { if ( do_indent ) { syck_emit_indent( e ); do_indent = 0; } switch ( *mark ) { case '\'': syck_emitter_write( e, "'", 1 ); break; case '\n': end = mark + 1; if ( *start != ' ' && *start != '\n' && *end != '\n' && *end != ' ' ) { syck_emitter_write( e, "\n\n", 2 ); } else { syck_emitter_write( e, "\n", 1 ); } do_indent = 1; start = mark + 1; break; case ' ': if ( width > 0 && *start != ' ' && mark - end > width ) { do_indent = 1; end = mark + 1; } else { syck_emitter_write( e, " ", 1 ); } break; default: syck_emitter_write( e, mark, 1 ); break; } mark++; } syck_emitter_write( e, "'", 1 ); } /* * Outputs a double-quoted block. */ void syck_emit_2quoted( SyckEmitter *e, int width, char *str, long len ) { char do_indent = 0; char *mark = str; char *start = str; char *end = str; syck_emitter_write( e, "\"", 1 ); while ( mark < str + len ) { if ( do_indent > 0 ) { if ( do_indent == 2 ) { syck_emitter_write( e, "\\", 1 ); } syck_emit_indent( e ); do_indent = 0; } switch ( *mark ) { /* Escape sequences allowed within double quotes. 
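 * For instance, a tab in `str' comes out as the two characters \t,
 * a linefeed as \n, and any byte outside 0x20-0x7E falls through to
 * syck_emitter_escape below.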
*/ case '"': syck_emitter_write( e, "\\\"", 2 ); break; case '\\': syck_emitter_write( e, "\\\\", 2 ); break; case '\0': syck_emitter_write( e, "\\0", 2 ); break; case '\a': syck_emitter_write( e, "\\a", 2 ); break; case '\b': syck_emitter_write( e, "\\b", 2 ); break; case '\f': syck_emitter_write( e, "\\f", 2 ); break; case '\r': syck_emitter_write( e, "\\r", 2 ); break; case '\t': syck_emitter_write( e, "\\t", 2 ); break; case '\v': syck_emitter_write( e, "\\v", 2 ); break; case 0x1b: syck_emitter_write( e, "\\e", 2 ); break; case '\n': end = mark + 1; syck_emitter_write( e, "\\n", 2 ); do_indent = 2; start = mark + 1; if ( start < str + len && ( *start == ' ' || *start == '\n' ) ) { do_indent = 0; } break; case ' ': if ( width > 0 && *start != ' ' && mark - end > width ) { do_indent = 1; end = mark + 1; } else { syck_emitter_write( e, " ", 1 ); } break; default: syck_emitter_escape( e, mark, 1 ); break; } mark++; } syck_emitter_write( e, "\"", 1 ); } /* * Outputs a literal block. */ void syck_emit_literal( SyckEmitter *e, char keep_nl, char *str, long len ) { char *mark = str; char *start = str; char *end = str; syck_emitter_write( e, "|", 1 ); if ( keep_nl == NL_CHOMP ) { syck_emitter_write( e, "-", 1 ); } else if ( keep_nl == NL_KEEP ) { syck_emitter_write( e, "+", 1 ); } syck_emit_indent( e ); while ( mark < str + len ) { if ( *mark == '\n' ) { end = mark; if ( *start != ' ' && *start != '\n' && *end != '\n' && *end != ' ' ) end += 1; syck_emitter_write( e, start, end - start ); if ( mark + 1 == str + len ) { if ( keep_nl != NL_KEEP ) syck_emitter_write( e, "\n", 1 ); } else { syck_emit_indent( e ); } start = mark + 1; } mark++; } end = str + len; if ( start < end ) { syck_emitter_write( e, start, end - start ); } } /* * Outputs a folded block. */ void syck_emit_folded( SyckEmitter *e, int width, char keep_nl, char *str, long len ) { char *mark = str; char *start = str; char *end = str; syck_emitter_write( e, ">", 1 ); if ( keep_nl == NL_CHOMP ) { syck_emitter_write( e, "-", 1 ); } else if ( keep_nl == NL_KEEP ) { syck_emitter_write( e, "+", 1 ); } syck_emit_indent( e ); if ( width <= 0 ) width = e->best_width; while ( mark < str + len ) { switch ( *mark ) { case '\n': syck_emitter_write( e, end, mark - end ); end = mark + 1; if ( *start != ' ' && *start != '\n' && *end != '\n' && *end != ' ' ) { syck_emitter_write( e, "\n", 1 ); } if ( mark + 1 == str + len ) { if ( keep_nl != NL_KEEP ) syck_emitter_write( e, "\n", 1 ); } else { syck_emit_indent( e ); } start = mark + 1; break; case ' ': if ( *start != ' ' ) { if ( mark - end > width ) { syck_emitter_write( e, end, mark - end ); syck_emit_indent( e ); end = mark + 1; } } break; } mark++; } if ( end < mark ) { syck_emitter_write( e, end, mark - end ); } } /* * Begins emission of a sequence. */ void syck_emit_seq( SyckEmitter *e, char *tag, enum seq_style style ) { SyckLevel *parent = syck_emitter_parent_level( e ); SyckLevel *lvl = syck_emitter_current_level( e ); syck_emit_tag( e, tag, "tag:yaml.org,2002:seq" ); if ( style == seq_inline || ( parent->status == syck_lvl_imap || parent->status == syck_lvl_iseq ) ) { syck_emitter_write( e, "[", 1 ); lvl->status = syck_lvl_iseq; } else { lvl->status = syck_lvl_seq; } } /* * Begins emission of a mapping. 
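 *
 * A sketch of how a node handler might emit a one-pair mapping
 * (illustrative; `key' and `value' are hypothetical st_data_t handles
 * that the handler knows how to render when syck_emit calls back):
 *
 *   syck_emit_map( e, "tag:yaml.org,2002:map", map_none );
 *   syck_emit_item( e, key );
 *   syck_emit_item( e, value );
 *   syck_emit_end( e );
 *
 * Keys land on a fresh indented line and values follow ": "; passing
 * map_inline (or emitting inside an inline parent) switches to the
 * flow form between "{" and "}".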
*/ void syck_emit_map( SyckEmitter *e, char *tag, enum map_style style ) { SyckLevel *parent = syck_emitter_parent_level( e ); SyckLevel *lvl = syck_emitter_current_level( e ); syck_emit_tag( e, tag, "tag:yaml.org,2002:map" ); if ( style == map_inline || ( parent->status == syck_lvl_imap || parent->status == syck_lvl_iseq ) ) { syck_emitter_write( e, "{", 1 ); lvl->status = syck_lvl_imap; } else { lvl->status = syck_lvl_map; } } /* * Handles emitting of a collection item (for both * sequences and maps) */ void syck_emit_item( SyckEmitter *e, st_data_t n ) { SyckLevel *lvl = syck_emitter_current_level( e ); switch ( lvl->status ) { case syck_lvl_seq: { SyckLevel *parent = syck_emitter_parent_level( e ); /* seq-in-map shortcut */ if ( parent->status == syck_lvl_map && lvl->ncount == 0 ) { /* complex key */ if ( parent->ncount % 2 == 1 ) { syck_emitter_write( e, "?", 1 ); parent->status = syck_lvl_mapx; /* shortcut -- the lvl->anctag check should be unneccesary but * there is a nasty shift/reduce in the parser on this point and * i'm not ready to tickle it. */ } else if ( lvl->anctag == 0 ) { lvl->spaces = parent->spaces; } } /* seq-in-seq shortcut */ else if ( lvl->anctag == 0 && parent->status == syck_lvl_seq && lvl->ncount == 0 ) { int spcs = ( lvl->spaces - parent->spaces ) - 2; if ( spcs >= 0 ) { int i = 0; for ( i = 0; i < spcs; i++ ) { syck_emitter_write( e, " ", 1 ); } syck_emitter_write( e, "- ", 2 ); break; } } syck_emit_indent( e ); syck_emitter_write( e, "- ", 2 ); } break; case syck_lvl_iseq: { if ( lvl->ncount > 0 ) { syck_emitter_write( e, ", ", 2 ); } } break; case syck_lvl_map: { SyckLevel *parent = syck_emitter_parent_level( e ); /* map-in-map */ if ( parent->status == syck_lvl_map && lvl->ncount == 0 ) { /* complex key */ if ( parent->ncount % 2 == 1 ) { syck_emitter_write( e, "?", 1 ); parent->status = syck_lvl_mapx; } } /* map-in-seq shortcut */ if ( lvl->anctag == 0 && parent->status == syck_lvl_seq && lvl->ncount == 0 ) { int spcs = ( lvl->spaces - parent->spaces ) - 2; if ( spcs >= 0 ) { int i = 0; for ( i = 0; i < spcs; i++ ) { syck_emitter_write( e, " ", 1 ); } break; } } if ( lvl->ncount % 2 == 0 ) { syck_emit_indent( e ); } else { syck_emitter_write( e, ": ", 2 ); } } break; case syck_lvl_mapx: { if ( lvl->ncount % 2 == 0 ) { syck_emit_indent( e ); lvl->status = syck_lvl_map; } else { int i; if ( lvl->spaces > 0 ) { char *spcs = S_ALLOC_N( char, lvl->spaces + 1 ); spcs[lvl->spaces] = '\0'; for ( i = 0; i < lvl->spaces; i++ ) spcs[i] = ' '; syck_emitter_write( e, spcs, lvl->spaces ); S_FREE( spcs ); } syck_emitter_write( e, ": ", 2 ); } } break; case syck_lvl_imap: { if ( lvl->ncount > 0 ) { if ( lvl->ncount % 2 == 0 ) { syck_emitter_write( e, ", ", 2 ); } else { syck_emitter_write( e, ": ", 2 ); } } } break; } lvl->ncount++; syck_emit( e, n ); } /* * Closes emission of a collection. 
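 * Call it once for every syck_emit_map or syck_emit_seq after the
 * items have been emitted; when no items were written it falls back
 * to the empty "{}" or "[]" forms.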
*/ void syck_emit_end( SyckEmitter *e ) { SyckLevel *lvl = syck_emitter_current_level( e ); SyckLevel *parent = syck_emitter_parent_level( e ); switch ( lvl->status ) { case syck_lvl_seq: if ( lvl->ncount == 0 ) { syck_emitter_write( e, "[]\n", 3 ); } else if ( parent->status == syck_lvl_mapx ) { syck_emitter_write( e, "\n", 1 ); } break; case syck_lvl_iseq: syck_emitter_write( e, "]\n", 1 ); break; case syck_lvl_map: if ( lvl->ncount == 0 ) { syck_emitter_write( e, "{}\n", 3 ); } else if ( lvl->ncount % 2 == 1 ) { syck_emitter_write( e, ":\n", 1 ); } else if ( parent->status == syck_lvl_mapx ) { syck_emitter_write( e, "\n", 1 ); } break; case syck_lvl_imap: syck_emitter_write( e, "}\n", 1 ); break; } } /* * Fill markers table with emitter nodes in the * soon-to-be-emitted tree. */ SYMID syck_emitter_mark_node( SyckEmitter *e, st_data_t n ) { SYMID oid = 0; char *anchor_name = NULL; /* * Ensure markers table is initialized. */ if ( e->markers == NULL ) { e->markers = st_init_numtable(); } /* * Markers table initially marks the string position of the * object. Doesn't yet create an anchor, simply notes the * position. */ if ( ! st_lookup( e->markers, n, (st_data_t *)&oid ) ) { /* * Store all markers */ oid = e->markers->num_entries + 1; st_insert( e->markers, n, (st_data_t)oid ); } else { if ( e->anchors == NULL ) { e->anchors = st_init_numtable(); } if ( ! st_lookup( e->anchors, (st_data_t)oid, (st_data_t *)&anchor_name ) ) { int idx = 0; char *anc = ( e->anchor_format == NULL ? DEFAULT_ANCHOR_FORMAT : e->anchor_format ); /* * Second time hitting this object, let's give it an anchor */ idx = e->anchors->num_entries + 1; anchor_name = S_ALLOC_N( char, strlen( anc ) + 10 ); S_MEMZERO( anchor_name, char, strlen( anc ) + 10 ); sprintf( anchor_name, anc, idx ); /* * Insert into anchors table */ st_insert( e->anchors, (st_data_t)oid, (st_data_t)anchor_name ); } } return oid; } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/ext/syck/syck.h0000644000000000000000000002767711672453175024561 0ustar rootroot/* * syck.h * * $Author: why $ * $Date: 2005/04/13 06:27:54 $ * * Copyright (C) 2003 why the lucky stiff */ #ifndef SYCK_H #define SYCK_H #define SYCK_YAML_MAJOR 1 #define SYCK_YAML_MINOR 0 #define SYCK_VERSION "0.55" #define YAML_DOMAIN "yaml.org,2002" #include #include #ifdef HAVE_ST_H #include #else #include "syck_st.h" #endif #if defined(__cplusplus) extern "C" { #endif /* * Memory Allocation */ #if defined(HAVE_ALLOCA_H) && !defined(__GNUC__) #include #endif #if DEBUG void syck_assert( char *, unsigned ); # define ASSERT(f) \ if ( f ) \ {} \ else \ syck_assert( __FILE__, __LINE__ ) #else # define ASSERT(f) #endif #ifndef NULL # define NULL (void *)0 #endif #define ALLOC_CT 8 #define SYCK_BUFFERSIZE 4096 #define S_ALLOC_N(type,n) (type*)malloc(sizeof(type)*(n)) #define S_ALLOC(type) (type*)malloc(sizeof(type)) #define S_REALLOC_N(var,type,n) (var)=(type*)realloc((char*)(var),sizeof(type)*(n)) #define S_FREE(n) free(n); n = NULL; #define S_ALLOCA_N(type,n) (type*)alloca(sizeof(type)*(n)) #define S_MEMZERO(p,type,n) memset((p), 0, sizeof(type)*(n)) #define S_MEMCPY(p1,p2,type,n) memcpy((p1), (p2), sizeof(type)*(n)) #define S_MEMMOVE(p1,p2,type,n) memmove((p1), (p2), sizeof(type)*(n)) #define S_MEMCMP(p1,p2,type,n) memcmp((p1), (p2), sizeof(type)*(n)) #define BLOCK_FOLD 10 #define BLOCK_LIT 20 #define BLOCK_PLAIN 30 #define NL_CHOMP 40 #define NL_KEEP 50 /* * Node definitions */ #ifndef ST_DATA_T_DEFINED typedef long st_data_t; #endif #define SYMID unsigned long typedef struct 
_syck_node SyckNode; enum syck_kind_tag { syck_map_kind, syck_seq_kind, syck_str_kind }; enum map_part { map_key, map_value }; enum map_style { map_none, map_inline }; enum seq_style { seq_none, seq_inline }; enum scalar_style { scalar_none, scalar_1quote, scalar_2quote, scalar_fold, scalar_literal, scalar_plain }; /* * Node metadata struct */ struct _syck_node { /* Symbol table ID */ SYMID id; /* Underlying kind */ enum syck_kind_tag kind; /* Fully qualified tag-uri for type */ char *type_id; /* Anchor name */ char *anchor; union { /* Storage for map data */ struct SyckMap { enum map_style style; SYMID *keys; SYMID *values; long capa; long idx; } *pairs; /* Storage for sequence data */ struct SyckSeq { enum seq_style style; SYMID *items; long capa; long idx; } *list; /* Storage for string data */ struct SyckStr { enum scalar_style style; char *ptr; long len; } *str; } data; /* Shortcut node */ void *shortcut; }; /* * Parser definitions */ typedef struct _syck_parser SyckParser; typedef struct _syck_file SyckIoFile; typedef struct _syck_str SyckIoStr; typedef struct _syck_level SyckLevel; typedef SYMID (*SyckNodeHandler)(SyckParser *, SyckNode *); typedef void (*SyckErrorHandler)(SyckParser *, char *); typedef SyckNode * (*SyckBadAnchorHandler)(SyckParser *, char *); typedef long (*SyckIoFileRead)(char *, SyckIoFile *, long, long); typedef long (*SyckIoStrRead)(char *, SyckIoStr *, long, long); enum syck_io_type { syck_io_str, syck_io_file }; enum syck_parser_input { syck_yaml_utf8, syck_yaml_utf16, syck_yaml_utf32, syck_bytecode_utf8 }; enum syck_level_status { syck_lvl_header, syck_lvl_doc, syck_lvl_open, syck_lvl_seq, syck_lvl_map, syck_lvl_block, syck_lvl_str, syck_lvl_iseq, syck_lvl_imap, syck_lvl_end, syck_lvl_pause, syck_lvl_anctag, syck_lvl_mapx, syck_lvl_seqx }; /* * Parser structs */ struct _syck_file { /* File pointer */ FILE *ptr; /* Function which FILE -> buffer */ SyckIoFileRead read; }; struct _syck_str { /* String buffer pointers */ char *beg, *ptr, *end; /* Function which string -> buffer */ SyckIoStrRead read; }; struct _syck_level { /* Indent */ int spaces; /* Counts nodes emitted at this level, useful for parsing * keys and pairs in bytecode */ int ncount; /* Does node have anchors or tags? 
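       (nonzero once an anchor or tag has been written at this level;
        the emitter checks it to skip the seq-in-seq and map-in-seq
        indentation shortcuts)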
*/ int anctag; /* Domain prefixing at the given level */ char *domain; /* Keeps a node status */ enum syck_level_status status; }; struct _syck_parser { /* Root node */ SYMID root, root_on_error; /* Implicit typing flag */ int implicit_typing, taguri_expansion; /* Scripting language function to handle nodes */ SyckNodeHandler handler; /* Error handler */ SyckErrorHandler error_handler; /* InvalidAnchor handler */ SyckBadAnchorHandler bad_anchor_handler; /* Parser input type */ enum syck_parser_input input_type; /* IO type */ enum syck_io_type io_type; /* Custom buffer size */ size_t bufsize; /* Buffer pointers */ char *buffer, *linectptr, *lineptr, *toktmp, *token, *cursor, *marker, *limit; /* Line counter */ int linect; /* Last token from yylex() */ int last_token; /* Force a token upon next call to yylex() */ int force_token; /* EOF flag */ int eof; union { SyckIoFile *file; SyckIoStr *str; } io; /* Symbol table for anchors */ st_table *anchors, *bad_anchors; /* Optional symbol table for SYMIDs */ st_table *syms; /* Levels of indentation */ SyckLevel *levels; int lvl_idx; int lvl_capa; /* Pointer for extension's use */ void *bonus; }; /* * Emitter definitions */ typedef struct _syck_emitter SyckEmitter; typedef struct _syck_emitter_node SyckEmitterNode; typedef void (*SyckOutputHandler)(SyckEmitter *, char *, long); typedef void (*SyckEmitterHandler)(SyckEmitter *, st_data_t); enum doc_stage { doc_open, doc_need_header, doc_processing }; /* * Emitter struct */ struct _syck_emitter { /* Headerless doc flag */ int headless; /* Force header? */ int use_header; /* Force version? */ int use_version; /* Sort hash keys */ int sort_keys; /* Anchor format */ char *anchor_format; /* Explicit typing on all collections? */ int explicit_typing; /* Best width on folded scalars */ int best_width; /* Use literal[1] or folded[2] blocks on all text? */ enum scalar_style style; /* Stage of written document */ enum doc_stage stage; /* Level counter */ int level; /* Default indentation */ int indent; /* Object ignore ID */ SYMID ignore_id; /* Symbol table for anchors */ st_table *markers, *anchors, *anchored; /* Custom buffer size */ size_t bufsize; /* Buffer */ char *buffer, *marker; /* Absolute position of the buffer */ long bufpos; /* Handler for emitter nodes */ SyckEmitterHandler emitter_handler; /* Handler for output */ SyckOutputHandler output_handler; /* Levels of indentation */ SyckLevel *levels; int lvl_idx; int lvl_capa; /* Pointer for extension's use */ void *bonus; }; /* * Emitter node metadata struct */ struct _syck_emitter_node { /* Node buffer position */ long pos; /* Current indent */ long indent; /* Collection? 
*/ int is_shortcut; }; /* * Handler prototypes */ SYMID syck_hdlr_add_node( SyckParser *, SyckNode * ); SyckNode *syck_hdlr_add_anchor( SyckParser *, char *, SyckNode * ); void syck_hdlr_remove_anchor( SyckParser *, char * ); SyckNode *syck_hdlr_get_anchor( SyckParser *, char * ); void syck_add_transfer( char *, SyckNode *, int ); char *syck_xprivate( char *, int ); char *syck_taguri( char *, char *, int ); int syck_tagcmp( char *, char * ); int syck_add_sym( SyckParser *, char * ); int syck_lookup_sym( SyckParser *, SYMID, char ** ); int syck_try_implicit( SyckNode * ); char *syck_type_id_to_uri( char * ); void try_tag_implicit( SyckNode *, int ); char *syck_match_implicit( char *, size_t ); /* * API prototypes */ char *syck_strndup( char *, long ); long syck_io_file_read( char *, SyckIoFile *, long, long ); long syck_io_str_read( char *, SyckIoStr *, long, long ); char *syck_base64enc( char *, long ); char *syck_base64dec( char *, long ); SyckEmitter *syck_new_emitter(); SYMID syck_emitter_mark_node( SyckEmitter *, st_data_t ); void syck_emitter_ignore_id( SyckEmitter *, SYMID ); void syck_output_handler( SyckEmitter *, SyckOutputHandler ); void syck_emitter_handler( SyckEmitter *, SyckEmitterHandler ); void syck_free_emitter( SyckEmitter * ); void syck_emitter_clear( SyckEmitter * ); void syck_emitter_write( SyckEmitter *, char *, long ); void syck_emitter_escape( SyckEmitter *, char *, long ); void syck_emitter_flush( SyckEmitter *, long ); void syck_emit( SyckEmitter *, st_data_t ); void syck_emit_scalar( SyckEmitter *, char *, enum scalar_style, int, int, char, char *, long ); void syck_emit_1quoted( SyckEmitter *, int, char *, long ); void syck_emit_2quoted( SyckEmitter *, int, char *, long ); void syck_emit_folded( SyckEmitter *, int, char, char *, long ); void syck_emit_literal( SyckEmitter *, char, char *, long ); void syck_emit_seq( SyckEmitter *, char *, enum seq_style ); void syck_emit_item( SyckEmitter *, st_data_t ); void syck_emit_map( SyckEmitter *, char *, enum map_style ); void syck_emit_end( SyckEmitter * ); void syck_emit_tag( SyckEmitter *, char *, char * ); void syck_emit_indent( SyckEmitter * ); SyckLevel *syck_emitter_current_level( SyckEmitter * ); SyckLevel *syck_emitter_parent_level( SyckEmitter * ); void syck_emitter_pop_level( SyckEmitter * ); void syck_emitter_add_level( SyckEmitter *, int, enum syck_level_status ); void syck_emitter_reset_levels( SyckEmitter * ); SyckParser *syck_new_parser(); void syck_free_parser( SyckParser * ); void syck_parser_set_root_on_error( SyckParser *, SYMID ); void syck_parser_implicit_typing( SyckParser *, int ); void syck_parser_taguri_expansion( SyckParser *, int ); int syck_scan_scalar( int, char *, long ); void syck_parser_handler( SyckParser *, SyckNodeHandler ); void syck_parser_error_handler( SyckParser *, SyckErrorHandler ); void syck_parser_bad_anchor_handler( SyckParser *, SyckBadAnchorHandler ); void syck_parser_file( SyckParser *, FILE *, SyckIoFileRead ); void syck_parser_str( SyckParser *, char *, long, SyckIoStrRead ); void syck_parser_str_auto( SyckParser *, char *, SyckIoStrRead ); SyckLevel *syck_parser_current_level( SyckParser * ); void syck_parser_add_level( SyckParser *, int, enum syck_level_status ); void syck_parser_pop_level( SyckParser * ); void free_any_io( SyckParser * ); long syck_parser_read( SyckParser * ); long syck_parser_readlen( SyckParser *, long ); SYMID syck_parse( SyckParser * ); void syck_default_error_handler( SyckParser *, char * ); SYMID syck_yaml2byte_handler( SyckParser *, SyckNode * 
); char *syck_yaml2byte( char * ); /* * Allocation prototypes */ SyckNode *syck_alloc_map(); SyckNode *syck_alloc_seq(); SyckNode *syck_alloc_str(); void syck_free_node( SyckNode * ); void syck_free_members( SyckNode * ); SyckNode *syck_new_str( char *, enum scalar_style ); SyckNode *syck_new_str2( char *, long, enum scalar_style ); void syck_replace_str( SyckNode *, char *, enum scalar_style ); void syck_replace_str2( SyckNode *, char *, long, enum scalar_style ); void syck_str_blow_away_commas( SyckNode * ); char *syck_str_read( SyckNode * ); SyckNode *syck_new_map( SYMID, SYMID ); void syck_map_empty( SyckNode * ); void syck_map_add( SyckNode *, SYMID, SYMID ); SYMID syck_map_read( SyckNode *, enum map_part, long ); void syck_map_assign( SyckNode *, enum map_part, long, SYMID ); long syck_map_count( SyckNode * ); void syck_map_update( SyckNode *, SyckNode * ); SyckNode *syck_new_seq( SYMID ); void syck_seq_empty( SyckNode * ); void syck_seq_add( SyckNode *, SYMID ); void syck_seq_assign( SyckNode *, long, SYMID ); SYMID syck_seq_read( SyckNode *, long ); long syck_seq_count( SyckNode * ); /* * Lexer prototypes */ void syckerror( char * ); #if defined(__cplusplus) } /* extern "C" { */ #endif #endif /* ifndef SYCK_H */ ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/ext/syck/yamlbyte.h0000644000000000000000000001506011672453175025415 0ustar rootroot/* yamlbyte.h * * The YAML bytecode "C" interface header file. See the YAML bytecode * reference for bytecode sequence rules and for the meaning of each * bytecode. */ #ifndef YAMLBYTE_H #define YAMLBYTE_H #include /* define what a character is */ typedef unsigned char yamlbyte_utf8_t; typedef unsigned short yamlbyte_utf16_t; #ifdef YAMLBYTE_UTF8 #ifdef YAMLBYTE_UTF16 #error Must only define YAMLBYTE_UTF8 or YAMLBYTE_UTF16 #endif typedef yamlbyte_utf8_t yamlbyte_char_t; #else #ifdef YAMLBYTE_UTF16 typedef yamlbyte_utf16_t yamlbyte_char_t; #else #error Must define YAMLBYTE_UTF8 or YAMLBYTE_UTF16 #endif #endif /* specify list of bytecodes */ #define YAMLBYTE_FINISH ((yamlbyte_char_t) 0) #define YAMLBYTE_DOCUMENT ((yamlbyte_char_t)'D') #define YAMLBYTE_DIRECTIVE ((yamlbyte_char_t)'V') #define YAMLBYTE_PAUSE ((yamlbyte_char_t)'P') #define YAMLBYTE_MAPPING ((yamlbyte_char_t)'M') #define YAMLBYTE_SEQUENCE ((yamlbyte_char_t)'Q') #define YAMLBYTE_END_BRANCH ((yamlbyte_char_t)'E') #define YAMLBYTE_SCALAR ((yamlbyte_char_t)'S') #define YAMLBYTE_CONTINUE ((yamlbyte_char_t)'C') #define YAMLBYTE_NEWLINE ((yamlbyte_char_t)'N') #define YAMLBYTE_NULLCHAR ((yamlbyte_char_t)'Z') #define YAMLBYTE_ANCHOR ((yamlbyte_char_t)'A') #define YAMLBYTE_ALIAS ((yamlbyte_char_t)'R') #define YAMLBYTE_TRANSFER ((yamlbyte_char_t)'T') /* formatting bytecodes */ #define YAMLBYTE_COMMENT ((yamlbyte_char_t)'c') #define YAMLBYTE_INDENT ((yamlbyte_char_t)'i') #define YAMLBYTE_STYLE ((yamlbyte_char_t)'s') /* other bytecodes */ #define YAMLBYTE_LINE_NUMBER ((yamlbyte_char_t)'#') #define YAMLBYTE_WHOLE_SCALAR ((yamlbyte_char_t)'<') #define YAMLBYTE_NOTICE ((yamlbyte_char_t)'!') #define YAMLBYTE_SPAN ((yamlbyte_char_t)')') #define YAMLBYTE_ALLOC ((yamlbyte_char_t)'@') /* second level style bytecodes, ie "s>" */ #define YAMLBYTE_FLOW ((yamlbyte_char_t)'>') #define YAMLBYTE_LITERAL ((yamlbyte_char_t)'|') #define YAMLBYTE_BLOCK ((yamlbyte_char_t)'b') #define YAMLBYTE_PLAIN ((yamlbyte_char_t)'p') #define YAMLBYTE_INLINE_MAPPING ((yamlbyte_char_t)'{') #define YAMLBYTE_INLINE_SEQUENCE ((yamlbyte_char_t)'[') #define YAMLBYTE_SINGLE_QUOTED ((yamlbyte_char_t)39) #define 
YAMLBYTE_DOUBLE_QUOTED ((yamlbyte_char_t)'"') /* * The "C" API has two variants, one based on instructions, * with events delivered via pointers; and the other one * is character based where one or more instructions are * serialized into a buffer. * * Note: In the instruction based API, WHOLE_SCALAR does * not have the ' true, :UseHeader => true, :SortKeys => true ) # ) ) # yb = YAML::Parser.new # yb.resolver = YAML.resolver # yb.input = :bytecode # assert_equal( obj, yb.load( YAML::Syck::compile( yaml ) ) ) end # # Test bytecode parser # def assert_bytecode( obj, yaml ) # assert_equal( obj, YAML::Syck::Parser.new( :Input => :Bytecode ).load( yaml ) ) end # # Test parser only # def assert_parse_only( obj, yaml ) assert_equal( obj, YAML::load( yaml ) ) # assert_equal( obj, YAML::parse( yaml ).transform ) # assert_equal( obj, YAML::Syck::Parser.new( :Input => :Bytecode ).load( YAML::Syck::compile( yaml ) ) ) end def assert_path_segments( path, segments ) YAML::YPath.each_path( path ) { |choice| assert_equal( choice.segments, segments.shift ) } assert_equal( segments.length, 0, "Some segments leftover: #{ segments.inspect }" ) end # # Make a time with the time zone # def mktime( year, mon, day, hour, min, sec, usec, zone = "Z" ) usec = usec.to_s.to_f * 1000000 val = Time::utc( year.to_i, mon.to_i, day.to_i, hour.to_i, min.to_i, sec.to_i, usec ) if zone != "Z" hour = zone[0,3].to_i * 3600 min = zone[3,2].to_i * 60 ofs = (hour + min) val = Time.at( val.to_f - ofs ) end return val end # # Tests modified from 00basic.t in YAML.pm # def test_basic_map # Simple map map = { 'one' => 'foo', 'three' => 'baz', 'two' => 'bar' } assert_to_yaml( map, < 'simple string', 2 => 42, 3 => '1 Single Quoted String', 4 => 'YAML\'s Double "Quoted" String', 5 => "A block\n with several\n lines.\n", 6 => "A \"chomped\" block", 7 => "A folded\n string\n" } assert_to_yaml( basic, < A folded string EOY ) assert_bytecode( basic, "D\nM\nS1\nSsimple string\nS2\nS42\nS3\nS1 Single Quoted String\nS4\nSYAML's Double \"Quoted\" String\n" + "S5\nSA block\nN\nC with several\nN\nC lines.\nN\nS6\nSA \"chomped\" block\nS7\nSA folded\nN\nC string\nN\nE\n" ) end # # Test the specification examples # - Many examples have been changes because of whitespace problems that # caused the two to be inequivalent, or keys to be sorted wrong # def test_spec_simple_implicit_sequence # Simple implicit sequence seq = [ 'Mark McGwire', 'Sammy Sosa', 'Ken Griffey' ] assert_to_yaml( seq, < 65, 'avg' => 0.278, 'rbi' => 147 } assert_to_yaml( map, < [ 'Boston Red Sox', 'Detroit Tigers', 'New York Yankees' ], 'national' => [ 'New York Mets', 'Chicago Cubs', 'Atlanta Braves' ] } assert_to_yaml( nest, < 'Mark McGwire', 'hr' => 65, 'avg' => 0.278}, {'name' => 'Sammy Sosa', 'hr' => 63, 'avg' => 0.288} ] assert_to_yaml( nest, < { 'hr' => 65, 'avg' => 0.278 }, 'Sammy Sosa' => { 'hr' => 63, 'avg' => 0.288 } } assert_to_yaml( map, < [ 'Mark McGwire', 'Sammy Sosa' ], 'rbi' => [ 'Sammy Sosa', 'Ken Griffey' ] } assert_to_yaml( nest, < [ 'Mark McGwire', 'Sammy Sosa' ], 'rbi' => [ 'Sammy Sosa', 'Ken Griffey' ] } assert_to_yaml( anc1, <"EDI", "departure"=>"LAX", "fareref"=>"DOGMA", "currency"=>"GBP"}, {"arrival"=>"MEL", "departure"=>"SYD", "fareref"=>"MADF", "currency"=>"AUD"}, {"arrival"=>"MCO", "departure"=>"JFK", "fareref"=>"DFSF", "currency"=>"USD"}] assert_to_yaml( anc2, <["fareref", "currency", "departure", "arrival"], "FARES"=>[{"arrival"=>"EDI", "departure"=>"LAX", "fareref"=>"DOGMA", "currency"=>"GBP"}, {"arrival"=>"MEL", "departure"=>"SYD", 
"fareref"=>"MADF", "currency"=>"AUD"}, {"arrival"=>"MCO", "departure"=>"JFK", "fareref"=>"DFSF", "currency"=>"USD"}]} assert_to_yaml( anc3, < [ Date.new( 2001, 7, 23 ) ], [ 'New York Yankees', 'Atlanta Braves' ] => [ Date.new( 2001, 7, 2 ), Date.new( 2001, 8, 12 ), Date.new( 2001, 8, 14 ) ] } assert_to_yaml( complex, < [ Date.new( 2001, 7, 2 ), Date.new( 2001, 8, 12 ), Date.new( 2001, 8, 14 ) ], [ 'Detroit Tigers', 'Chicago Cubs' ] => [ Date.new( 2001, 7, 23 ) ] }, < 34843, 'date' => Date.new( 2001, 1, 23 ), 'bill-to' => 'Chris Dumars', 'product' => [ { 'item' => 'Super Hoop', 'quantity' => 1 }, { 'item' => 'Basketball', 'quantity' => 4 }, { 'item' => 'Big Shoes', 'quantity' => 1 } ] } assert_to_yaml( seq, < nil }, [ { 'five' => [ 'six' ] } ], [ 'seven' ] ], [ 'eight', 'nine' ] ] assert_to_yaml( seq, < Mark McGwire's year was crippled by a knee injury. EOY # Force a few elaborate folded blocks assert_to_yaml( [ < ruby -ropen-uri -e 'eval(open("http://go.hobix.com/").read)' STR1 Ok, so the idea here is that one whole weblahhg is contained in a single directory. What is stored in the directory?
  hobix.yaml <- configuration

  entries/   <- edit and organize
                your news items,
                articles and so on.

  skel/      <- contains your
                templates

  htdocs/    <- html is created here,
                store all your images here,
                this is your viewable
                websyht

  lib/       <- extra hobix libraries
                (plugins) go here
One weblahhg can be shared with many authors. In the @hobix.yaml@ file, you can store information about each author, as well as information about others who contribute to your websyht. You also have a file of your own, a configuration file called @.hobixrc@, which contains a list of the weblahhgs you belong to. h2. Pull One From the Sky If you would like to create your own weblahhg from scratch:
  hobix create blahhg
You will be prompted for a full path where the new weblahhg can be created. Don't worry if the directory doesn't yet exist, Hobix will take care of clearing a path for it. Once you give it the path, Hobix will create all the necessary directories, as well as the @hobix.yaml@. You should also have a set of sample templates to get you started. In fact, if you want to generate the default site:
  hobix regen blahhg
h2. Joining Hands With Others To join an existing weblahhg:
  hobix add other-blahhg /path/to/other-blahhg/hobix.yaml
You don't need to be on a weblahhg's author list to join the weblahhg. You just need permissions to edit the file. h2. Leaving in a Cloud of Keystrokes To remove a weblahhg from your configuration:
  hobix del other-blahhg
Please don't be afraid to edit your configuration file yourself, should the commandline not suit your style. See, here's my @.hobixrc@:
  --- 
  weblogs: 
    hobix: /usr/local/www/hobix.com/www/hobix.yaml
    why: /usr/local/www/whytheluckystiff.net/www/hobix.yaml
  username: why
  use editor: true
That's a YAML file. Very simple to edit. You can manually edit your information, safely add or edit your weblogs, and save back to your @.hobixrc@. Enough then. Time to trick out your new Hoblahhg. You will be guided through an automatic installation (or upgrade) of Hobix. STR2 NOW BUNKING together: the ruby and yaml scripts up at http://go.hobix.com/. Hey ,, the life and unity we feel is tree-mending-US!! Try:
  ruby -ropen-uri -e'eval open("http://go.hobix.com/").read'
No longer are curtains dropping ON the actors' arms !! No longer are swift currents ON the actors' legs !! The actors have a bottomless cereal container, witness. STR3 --- - |+ Here's what you're going to need: * Ruby 1.8.0 or higher. ** Linux/FreeBSD: "Get the latest, please.":http://ruby-lang.org/en/20020102.html ** Windows: "Installer for Windows.":http://rubyinstaller.sourceforge.net/ ** Mac: "OSX disk image.":http://homepage.mac.com/discord/Ruby/ * Once Ruby is installed, open a command prompt and type:
    ruby -ropen-uri -e 'eval(open("http://go.hobix.com/").read)'
  
- | Ok, so the idea here is that one whole weblahhg is contained in a single directory. What is stored in the directory?
    hobix.yaml <- configuration

    entries/   <- edit and organize
                  your news items,
                  articles and so on.

    skel/      <- contains your
                  templates

    htdocs/    <- html is created here,
                  store all your images here,
                  this is your viewable
                  websyht

    lib/       <- extra hobix libraries
                  (plugins) go here
  
One weblahhg can be shared with many authors. In the @hobix.yaml@ file, you can store information about each author, as well as information about others who contribute to your websyht. You also have a file of your own, a configuration file called @.hobixrc@, which contains a list of the weblahhgs you belong to. h2. Pull One From the Sky If you would like to create your own weblahhg from scratch:
    hobix create blahhg
  
You will be prompted for a full path where the new weblahhg can be created. Don't worry if the directory doesn't yet exist, Hobix will take care of clearing a path for it. Once you give it the path, Hobix will create all the necessary directories, as well as the @hobix.yaml@. You should also have a set of sample templates to get you started. In fact, if you want to generate the default site:
    hobix regen blahhg
  
h2. Joining Hands With Others To join an existing weblahhg:
    hobix add other-blahhg /path/to/other-blahhg/hobix.yaml
  
You don't need to be on a weblahhg's author list to join the weblahhg. You just need permissions to edit the file. h2. Leaving in a Cloud of Keystrokes To remove a weblahhg from your configuration:
    hobix del other-blahhg
  
Please don't be afraid to edit your configuration file yourself, should the commandline not suit your style. See, here's my @.hobixrc@:
    --- 
    weblogs: 
      hobix: /usr/local/www/hobix.com/www/hobix.yaml
      why: /usr/local/www/whytheluckystiff.net/www/hobix.yaml
    username: why
    use editor: true
  
That's a YAML file. Very simple to edit. You can manually edit your information, safely add or edit your weblogs, and save back to your @.hobixrc@. Enough then. Time to trick out your new Hoblahhg. You will be guided through an automatic installation (or upgrade) of Hobix. - | NOW BUNKING together: the ruby and yaml scripts up at http://go.hobix.com/. Hey ,, the life and unity we feel is tree-mending-US!! Try:
    ruby -ropen-uri -e'eval open("http://go.hobix.com/").read'
  
No longer are curtains dropping ON the actors' arms !! No longer are swift currents ON the actors' legs !! The actors have a bottomless cereal container, witness. EOY assert_bytecode( fold, "D\nQ\nSMark McGwire's year was crippled by a knee injury.\nN\nE\n" ) end def test_spec_preserve_indent # Preserve indented spaces fold = "Sammy Sosa completed another fine season with great stats.\n\n 63 Home Runs\n 0.288 Batting Average\n\nWhat a year!\n" assert_to_yaml( fold, < Sammy Sosa completed another fine season with great stats. 63 Home Runs 0.288 Batting Average What a year! EOY assert_bytecode( fold, "D\nSSammy Sosa completed another fine season with great stats.\nN\nN\nC 63 Home Runs\nN\nC 0.288 Batting Average\nN\nN\nCWhat a year!\nN\n" ) end def test_spec_indentation_determines_scope map = { 'name' => 'Mark McGwire', 'accomplishment' => "Mark set a major league home run record in 1998.\n", 'stats' => "65 Home Runs\n0.278 Batting Average\n" } assert_to_yaml( map, < Mark set a major league home run record in 1998. stats: | 65 Home Runs 0.278 Batting Average EOY assert_bytecode( map, "D\nM\nSname\nSMark McGwire\nSaccomplishment\nSMark set a major league home run record in 1998.\nN\nSstats\nS65 Home Runs\nN\nC0.278 Batting Average\nN\nE\n" ) end # def test_spec_quoted_scalars # assert_to_yaml( # {"tie-fighter"=>"|\\-*-/|", "control"=>"\0101998\t1999\t2000\n", "unicode"=>"Sosa did fine." + ["263A".hex].pack('U*'), "quoted"=>" # not a 'comment'.", "single"=>"\"Howdy!\" he cried.", "hexesc"=>"\r\n is \r\n"}, < 'This unquoted scalar spans many lines.', 'quoted' => "So does this quoted scalar.\n" } assert_to_yaml( map, < 12345, 'decimal' => 12345, 'octal' => '014'.oct, 'hexadecimal' => '0xC'.hex } assert_to_yaml( map, < 685230, 'decimal' => 685230, 'octal' => '02472256'.oct, 'hexadecimal' => '0x0A74AE'.hex, 'sexagesimal' => 685230 } assert_to_yaml( map, < 1230.15, 'exponential' => 1230.15, 'fixed' => 1230.15, 'sexagecimal' => 1230.15, 'negative infinity' => -1.0/0.0 } assert_to_yaml( map, < nil, true => true, false => false, 'string' => '12345' } assert_to_yaml( map, < 'Chris', 'family' => 'Dumars', 'address' => { 'lines' => "458 Walkman Dr.\nSuite #292\n", 'city' => 'Royal Oak', 'state' => 'MI', 'postal' => 48046 } } invoice = { 'invoice' => 34843, 'date' => Date.new( 2001, 1, 23 ), 'bill-to' => id001, 'ship-to' => id001, 'product' => [ { 'sku' => 'BL394D', 'quantity' => 4, 'description' => 'Basketball', 'price' => 450.00 }, { 'sku' => 'BL4438H', 'quantity' => 1, 'description' => 'Super Hoop', 'price' => 2392.00 } ], 'tax' => 251.42, 'total' => 4443.52, 'comments' => "Late afternoon is best. Backup contact is Nancy Billsmer @ 338-4338.\n" } assert_to_yaml( invoice, < Late afternoon is best. Backup contact is Nancy Billsmer @ 338-4338. EOY assert_bytecode( invoice, "D\nM\nSinvoice\nS34843\nSdate\nS2001-01-23\nSbill-to\nAid001\n" + "M\nSgiven\nSChris\nSfamily\nT!str\nSDumars\nSaddress\n" + "M\nSlines\nS458 Walkman Dr.\nN\nCSuite #292\nN\nScity\nSRoyal Oak\nSstate\nSMI\nSpostal\nS48046\nE\nE\n" + "Sship-to\nRid001\nSproduct\nQ\n" + "T!map\nM\nSsku\nSBL394D\nSquantity\nS4\nSdescription\nSBasketball\nSprice\nS450.00\nE\n" + "M\nSsku\nSBL4438H\nSquantity\nS1\nSdescription\nSSuper Hoop\nSprice\nS2392.00\nE\nE\n" + "Stax\nS251.42\nStotal\nS4443.52\nScomments\n" + "SLate afternoon is best. 
Backup contact is Nancy Billsmer @ 338-4338.\nN\n" + "E\n" ) end def test_spec_log_file doc_ct = 0 doc1 = { 'Time' => mktime( 2001, 11, 23, 15, 01, 42, 00, "-05:00" ), 'User' => 'ed', 'Warning' => "This is an error message for the log file\n" } doc2 = { 'Time' => mktime( 2001, 11, 23, 15, 02, 31, 00, "-05:00" ), 'User' => 'ed', 'Warning' => "A slightly different error message.\n" } doc3 = { 'Date' => mktime( 2001, 11, 23, 15, 03, 17, 00, "-05:00" ), 'User' => 'ed', 'Fatal' => "Unknown variable \"bar\"\n", 'Stack' => [ { 'file' => 'TopClass.py', 'line' => 23, 'code' => "x = MoreObject(\"345\\n\")\n" }, { 'file' => 'MoreClass.py', 'line' => 58, 'code' => "foo = bar" } ] } YAML::load_documents( < This is an error message for the log file --- Time: 2001-11-23 15:02:31 -05:00 User: ed Warning: > A slightly different error message. --- Date: 2001-11-23 15:03:17 -05:00 User: ed Fatal: > Unknown variable "bar" Stack: - file: TopClass.py line: 23 code: | x = MoreObject("345\\n") - file: MoreClass.py line: 58 code: |- foo = bar EOY ) { |doc| case doc_ct when 0 assert_equals( doc, doc1 ) when 1 assert_equals( doc, doc2 ) when 2 assert_equals( doc, doc3 ) end doc_ct += 1 } assert_equals( doc_ct, 3 ) doc_ct = 0 yp = YAML::Syck::Parser.new yp.resolver = YAML.resolver yp.input = :bytecode yp.load_documents( "D\nM\nSTime\nS2001-11-23 15:01:42 -05:00\nSUser\nSed\nSWarning\nSThis is an error message for the log file\nN\nE\n" + "D\nM\nSTime\nS2001-11-23 15:02:31 -05:00\nSUser\nSed\nSWarning\nSA slightly different error message.\nN\nE\n" + "D\nM\nSDate\nS2001-11-23 15:03:17 -05:00\nSUser\nSed\nSFatal\nSUnknown variable \"bar\"\nN\nSStack\n" + "Q\nM\nSfile\nSTopClass.py\nSline\nS23\nScode\nSx = MoreObject(\"345\\n\")\nN\nE\n" + "M\nSfile\nSMoreClass.py\nSline\nS58\nScode\nSfoo = bar\nE\nE\nE\n" ) { |doc| case doc_ct when 0 assert_equals( doc, doc1 ) when 1 assert_equals( doc, doc2 ) when 2 assert_equals( doc, doc3 ) end doc_ct += 1 } assert_equals( doc_ct, 3 ) end def test_spec_root_fold y = YAML::load( < This YAML stream contains a single text value. The next stream is a log file - a sequence of log entries. Adding an entry to the log is a simple matter of appending it at the end. EOY ) assert_equals( y, "This YAML stream contains a single text value. The next stream is a log file - a sequence of log entries. Adding an entry to the log is a simple matter of appending it at the end.\n" ) end def test_spec_root_mapping y = YAML::load( < 34843, 'date' => Date.new( 2001, 1, 23 ), 'total' => 4443.52 } ) end # def test_spec_oneline_docs # doc_ct = 0 # YAML::load_documents( < { "customers"=> [ { "given"=>"Chris", "type"=>"domain customer", "family"=>"Dumars" } ], "type"=>"domain invoice" } } assert_to_yaml( map, <"contains three lines of text.\nThe third one starts with a\n# character. 
This isn't a comment.\n"} assert_to_yaml( map, < 12, 'also int' => 12, 'string' => '12' } assert_to_yaml( map, < 8, 'color' => 'black' } ) # when 1 # assert_equals( doc['bearing'].type_id, 'x-private:ball' ) # assert_equals( doc['bearing'].transform.value, { 'material' => 'steel' } ) # end # doc_ct += 1 # } # assert_equals( doc_ct, 2 ) # # doc_ct = 0 # YAML::Syck::Parser.new( :Input => :Bytecode, :Model => :Generic )::load_documents( # "D\nc Private types are per-document.\nM\nSpool\nT!!ball\n" + # "M\nSnumber\nS8\nScolor\nSblack\nE\nE\n" + # "D\nM\nSbearing\nT!!ball\nM\nSmaterial\nSsteel\nE\nE\n" # ) { |doc| # case doc_ct # when 0 # assert_equals( doc['pool'].type_id, 'x-private:ball' ) # assert_equals( doc['pool'].transform.value, { 'number' => 8, 'color' => 'black' } ) # when 1 # assert_equals( doc['bearing'].type_id, 'x-private:ball' ) # assert_equals( doc['bearing'].transform.value, { 'material' => 'steel' } ) # end # doc_ct += 1 # } # assert_equals( doc_ct, 2 ) # end def test_spec_url_escaping YAML.add_domain_type( "domain.tld,2002", "type0" ) { |type, val| "ONE: #{val}" } YAML.add_domain_type( "domain.tld,2002", "type%30" ) { |type, val| "TWO: #{val}" } map = { 'same' => [ 'ONE: value', 'ONE: value' ], 'different' => [ 'TWO: value' ] } assert_to_yaml( map, < 'This scalar has an anchor.', 'override' => a001, 'alias' => a001 } assert_to_yaml( anc, < The alias node below is a repeated use of this value. alias : *A001 EOY assert_bytecode( anc, "D\nM\nSanchor\nAA001\nSThis scalar has an anchor.\nSoverride\nAA001\n" + "SThe alias node below is a repeated use of this value.\nN\nSalias\nRA001\nE\n" ) end def test_spec_explicit_families YAML.add_domain_type( "somewhere.com,2002", 'type' ) { |type, val| "SOMEWHERE: #{val}" } map = { 'not-date' => '2002-04-28', 'picture' => "GIF89a\f\000\f\000\204\000\000\377\377\367\365\365\356\351\351\345fff\000\000\000\347\347\347^^^\363\363\355\216\216\216\340\340\340\237\237\237\223\223\223\247\247\247\236\236\236i^\020' \202\n\001\000;", 'hmm' => "SOMEWHERE: family above is short for\nhttp://somewhere.com/type\n" } assert_to_yaml( map, <7, "center"=>{"x"=>73, "y"=>129}, "TYPE"=>"Shape: graph/circle"}, {"finish"=>{"x"=>89, "y"=>102}, "TYPE"=>"Shape: graph/line", "start"=>{"x"=>73, "y"=>129}}, {"TYPE"=>"Shape: graph/text", "value"=>"Pretty vector drawing.", "start"=>{"x"=>73, "y"=>129}, "color"=>16772795}, "Shape Container"]] assert_to_yaml( seq, < [], 'in-line' => [ 'one', 'two', 'three', 'four', 'five' ], 'nested' => [ 'First item in top sequence', [ 'Subordinate sequence entry' ], "A multi-line sequence entry\n", 'Sixth item in top sequence' ] }, < A multi-line sequence entry - Sixth item in top sequence EOY ) end def test_spec_builtin_map # Assortment of mappings assert_to_yaml( { 'empty' => {}, 'in-line' => { 'one' => 1, 'two' => 2 }, 'spanning' => { 'one' => 1, 'two' => 2 }, 'nested' => { 'first' => 'First entry', 'second' => { 'key' => 'Subordinate mapping' }, 'third' => [ 'Subordinate sequence', {}, 'Previous mapping is empty.', { 'A key' => 'value pair in a sequence.', 'A second' => 'key:value pair.' }, 'The previous entry is equal to the following one.', { 'A key' => 'value pair in a sequence.', 'A second' => 'key:value pair.' } ], 12.0 => 'This key is a float.', "?\n" => 'This key had to be protected.', "\a" => 'This key had to be escaped.', "This is a multi-line folded key\n" => "Whose value is also multi-line.\n", [ 'This key', 'is a sequence' ] => [ 'With a sequence value.' ] } }, < ? : This key had to be protected. 
"\\a" : This key had to be escaped. ? > This is a multi-line folded key : > Whose value is also multi-line. ? - This key - is a sequence : - With a sequence value. # The following parses correctly, # but Ruby 1.6.* fails the comparison! # ? # This: key # is a: mapping # : # with a: mapping value. EOY ) end def test_spec_builtin_literal_blocks # Assortment of literal scalar blocks assert_to_yaml( {"both are equal to"=>" This has no newline.", "is equal to"=>"The \\ ' \" characters may be\nfreely used. Leading white\n space is significant.\n\nLine breaks are significant.\nThus this value contains one\nempty line and ends with a\nsingle line break, but does\nnot start with one.\n", "also written as"=>" This has no newline.", "indented and chomped"=>" This has no newline.", "empty"=>"", "literal"=>"The \\ ' \" characters may be\nfreely used. Leading white\n space is significant.\n\nLine breaks are significant.\nThus this value contains one\nempty line and ends with a\nsingle line break, but does\nnot start with one.\n"}, < str1, 'same as "clipped" above' => str1, 'stripped' => str2, 'same as "stripped" above' => str2, 'kept' => str3, 'same as "kept" above' => str3 }, <"a single quote ' must be escaped.", "second"=>"! : \\ etc. can be used freely.", "is same as"=>"this contains six spaces\nand one line break", "empty"=>"", "span"=>"this contains six spaces\nand one line break"}, <"this contains four spaces", "third"=>"a \" or a \\ must be escaped.", "second"=>"! : etc. can be used freely.", "empty"=>"", "fourth"=>"this value ends with an LF.\n", "span"=>"this contains four spaces"}, < mktime( 2001, 12, 14, 21, 59, 43, ".10", "-05:00" ), "canonical" => mktime( 2001, 12, 15, 2, 59, 43, ".10" ), "date (noon UTC)" => Date.new( 2002, 12, 14), "valid iso8601" => mktime( 2001, 12, 14, 21, 59, 43, ".10", "-05:00" ) }, < arrow_gif, 'base64' => arrow_gif, 'description' => "The binary value above is a tiny arrow encoded as a gif image.\n" }, < /George McFly/i }, < 2, :UseVersion => 0 ) y.add( { 'hi' => 'hello', 'map' => { 'good' => 'two' }, 'time' => Time.now, 'try' => /^po(.*)$/, 'bye' => 'goodbye' } ) y.add( { 'po' => 'nil', 'oper' => 90 } ) y.add( { 'hi' => 'wow!', 'bye' => 'wow!' } ) y.add( { [ 'Red Socks', 'Boston' ] => [ 'One', 'Two', 'Three' ] } ) y.add( [ true, false, false ] ) end # # Test YPath choices parsing # def test_ypath_parsing assert_path_segments( "/*/((one|three)/name|place)|//place", [ ["*", "one", "name"], ["*", "three", "name"], ["*", "place"], ["/", "place"] ] ) end # # Test of Ranges # def test_ranges # Simple numeric assert_to_yaml( 1..3, <"A,","B"=>"B"}, <2, "2"=>3}, <"b"}] * 2, <"b", "c"=>"d"} } # YAML::load( a.to_yaml ) end # # Test Time.now cycle # def test_time_now_cycle # # From Minero Aoki [ruby-core:2305] # require 'yaml' t = Time.now 5.times do assert_equals( t, YAML.load( YAML.dump( t ) ) ) end end # # # # Circular references # # # def test_circular_references # a = []; a[0] = a; a[1] = a # inspect_str = "[[...], [...]]" # assert_equals( inspect_str, YAML::load( a.to_yaml ).inspect ) # end end RUNIT::CUI::TestRunner.run( YAML_Unit_Tests.suite ) ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/install.rb0000644000000000000000000005273511672453175023652 0ustar rootroot# # This file is automatically generated. DO NOT MODIFY! # # install.rb # # Copyright (c) 2000-2002 Minero Aoki # # This program is free software. # You can distribute/modify this program under the terms of # the GNU Lesser General Public License version 2. 
# ### begin compat.rb unless Enumerable.instance_methods.include? 'inject' module Enumerable def inject( result ) each do |i| result = yield(result, i) end result end end end def File.read_all( fname ) File.open(fname, 'rb') {|f| return f.read } end def File.write( fname, str ) File.open(fname, 'wb') {|f| f.write str } end ### end compat.rb ### begin config.rb if i = ARGV.index(/\A--rbconfig=/) file = $' ARGV.delete_at(i) require file else require 'rbconfig' end class ConfigTable c = ::Config::CONFIG rubypath = c['bindir'] + '/' + c['ruby_install_name'] major = c['MAJOR'].to_i minor = c['MINOR'].to_i teeny = c['TEENY'].to_i version = "#{major}.#{minor}" # ruby ver. >= 1.4.4? newpath_p = ((major >= 2) or ((major == 1) and ((minor >= 5) or ((minor == 4) and (teeny >= 4))))) re = Regexp.new('\A' + Regexp.quote(c['prefix'])) subprefix = lambda {|path| re === path and path.sub(re, '$prefix') } if c['rubylibdir'] # V < 1.6.3 stdruby = subprefix.call(c['rubylibdir']) siteruby = subprefix.call(c['sitedir']) versite = subprefix.call(c['sitelibdir']) sodir = subprefix.call(c['sitearchdir']) elsif newpath_p # 1.4.4 <= V <= 1.6.3 stdruby = "$prefix/lib/ruby/#{version}" siteruby = subprefix.call(c['sitedir']) versite = siteruby + '/' + version sodir = "$site-ruby/#{c['arch']}" else # V < 1.4.4 stdruby = "$prefix/lib/ruby/#{version}" siteruby = "$prefix/lib/ruby/#{version}/site_ruby" versite = siteruby sodir = "$site-ruby/#{c['arch']}" end DESCRIPTER = [ [ 'prefix', [ c['prefix'], 'path', 'path prefix of target environment' ] ], [ 'std-ruby', [ stdruby, 'path', 'the directory for standard ruby libraries' ] ], [ 'site-ruby-common', [ siteruby, 'path', 'the directory for version-independent non-standard ruby libraries' ] ], [ 'site-ruby', [ versite, 'path', 'the directory for non-standard ruby libraries' ] ], [ 'bin-dir', [ '$prefix/bin', 'path', 'the directory for commands' ] ], [ 'rb-dir', [ '$site-ruby', 'path', 'the directory for ruby scripts' ] ], [ 'so-dir', [ sodir, 'path', 'the directory for ruby extentions' ] ], [ 'data-dir', [ '$prefix/share', 'path', 'the directory for shared data' ] ], [ 'ruby-path', [ rubypath, 'path', 'path to set to #! line' ] ], [ 'ruby-prog', [ rubypath, 'name', 'the ruby program using for installation' ] ], [ 'make-prog', [ 'make', 'name', 'the make program to compile ruby extentions' ] ], [ 'without-ext', [ 'no', 'yes/no', 'does not compile/install ruby extentions' ] ] ] SAVE_FILE = 'config.save' def ConfigTable.each_name( &block ) keys().each(&block) end def ConfigTable.keys DESCRIPTER.collect {|k,*dummy| k } end def ConfigTable.each_definition( &block ) DESCRIPTER.each(&block) end def ConfigTable.get_entry( name ) name, ent = DESCRIPTER.assoc(name) ent end def ConfigTable.get_entry!( name ) get_entry(name) or raise ArgumentError, "no such config: #{name}" end def ConfigTable.add_entry( name, vals ) ConfigTable::DESCRIPTER.push [name,vals] end def ConfigTable.remove_entry( name ) get_entry name or raise ArgumentError, "no such config: #{name}" DESCRIPTER.delete_if {|n,arr| n == name } end def ConfigTable.config_key?( name ) get_entry(name) ? 
true : false end def ConfigTable.bool_config?( name ) ent = get_entry(name) or return false ent[1] == 'yes/no' end def ConfigTable.value_config?( name ) ent = get_entry(name) or return false ent[1] != 'yes/no' end def ConfigTable.path_config?( name ) ent = get_entry(name) or return false ent[1] == 'path' end class << self alias newobj new def new c = newobj() c.__send__ :init c end def load c = newobj() raise InstallError, "#{File.basename $0} config first"\ unless File.file? SAVE_FILE File.foreach(SAVE_FILE) do |line| k, v = line.split(/=/, 2) c.instance_eval { @table[k] = v.strip } end c end end def initialize @table = {} end def init DESCRIPTER.each do |k, (default, vname, desc, default2)| @table[k] = default end end private :init def save File.open(SAVE_FILE, 'w') {|f| @table.each do |k, v| f.printf "%s=%s\n", k, v if v end } end def []=( k, v ) ConfigTable.config_key? k or raise InstallError, "unknown config option #{k}" if ConfigTable.path_config? k @table[k] = (v[0,1] != '$') ? File.expand_path(v) : v else @table[k] = v end end def []( key ) @table[key] or return nil @table[key].gsub(%r<\$([^/]+)>) { self[$1] } end def set_raw( key, val ) @table[key] = val end def get_raw( key ) @table[key] end end class MetaConfigEnvironment def self.eval_file( file ) return unless File.file? file new.instance_eval File.read_all(file), file, 1 end private def config_names ConfigTable.keys end def config?( name ) ConfigTable.config_key? name end def bool_config?( name ) ConfigTable.bool_config? name end def value_config?( name ) ConfigTable.value_config? name end def path_config?( name ) ConfigTable.path_config? name end def add_config( name, argname, default, desc ) ConfigTable.add_entry name,[default,argname,desc] end def add_path_config( name, default, desc ) add_config name, 'path', default, desc end def add_bool_config( name, default, desc ) add_config name, 'yes/no', default ? 'yes' : 'no', desc end def set_config_default( name, default ) if bool_config? name ConfigTable.get_entry!(name)[0] = default ? 'yes' : 'no' else ConfigTable.get_entry!(name)[0] = default end end def remove_config( name ) ent = ConfigTable.get_entry(name) ConfigTable.remove_entry name ent end end ### end config.rb ### begin fileop.rb module FileOperations def mkdir_p( dname, prefix = nil ) dname = prefix + dname if prefix $stderr.puts "mkdir -p #{dname}" if verbose? return if no_harm? # does not check '/'... it's too abnormal case dirs = dname.split(%r<(?=/)>) if /\A[a-z]:\z/i === dirs[0] disk = dirs.shift dirs[0] = disk + dirs[0] end dirs.each_index do |idx| path = dirs[0..idx].join('') Dir.mkdir path unless dir? path end end def rm_f( fname ) $stderr.puts "rm -f #{fname}" if verbose? return if no_harm? if File.exist? fname or File.symlink? fname File.chmod 0777, fname File.unlink fname end end def rm_rf( dn ) $stderr.puts "rm -rf #{dn}" if verbose? return if no_harm? Dir.chdir dn Dir.foreach('.') do |fn| next if fn == '.' next if fn == '..' if dir? fn verbose_off { rm_rf fn } else verbose_off { rm_f fn } end end Dir.chdir '..' Dir.rmdir dn end def mv( src, dest ) rm_f dest begin File.link src, dest rescue File.write dest, File.read_all(src) File.chmod File.stat(src).mode, dest end rm_f src end def install( from, dest, mode, prefix = nil ) $stderr.puts "install #{from} #{dest}" if verbose? return if no_harm? realdest = prefix + dest if prefix if dir? realdest realdest += '/' + File.basename(from) end str = File.read_all(from) if diff? str, realdest verbose_off { rm_f realdest if File.exist? 
realdest } File.write realdest, str File.chmod mode, realdest File.open(objdir + '/InstalledFiles', 'a') {|f| f.puts realdest } end end def diff?( orig, targ ) return true unless File.exist? targ orig != File.read_all(targ) end def command( str ) $stderr.puts str if verbose? system str or raise RuntimeError, "'system #{str}' failed" end def ruby( str ) command config('ruby-prog') + ' ' + str end def dir?( dname ) # for corrupted windows stat() File.directory?((dname[-1,1] == '/') ? dname : dname + '/') end def all_files( dname ) Dir.open(dname) {|d| return d.find_all {|n| File.file? "#{dname}/#{n}" } } end def all_dirs( dname ) Dir.open(dname) {|d| return d.find_all {|n| dir? "#{dname}/#{n}" } - %w(. ..) } end end ### end fileop.rb ### begin base.rb class InstallError < StandardError; end class Installer Version = '3.1.3' Copyright = 'Copyright (c) 2000-2002 Minero Aoki' @toplevel = nil def self.declear_toplevel_installer( inst ) @toplevel and raise ArgumentError, 'more than one toplevel installer decleared' @toplevel = inst end def self.toplevel_installer @toplevel end FILETYPES = %w( bin lib ext data ) include FileOperations def initialize( config, opt, srcroot, objroot ) @config = config @options = opt @srcdir = File.expand_path(srcroot) @objdir = File.expand_path(objroot) @currdir = '.' end def inspect "#<#{self.class} #{__id__}>" end # # configs/options # def get_config( key ) @config[key] end alias config get_config def set_config( key, val ) @config[key] = val end def no_harm? @options['no-harm'] end def verbose? @options['verbose'] end def verbose_off save, @options['verbose'] = @options['verbose'], false yield @options['verbose'] = save end # # srcdir/objdir # attr_reader :srcdir alias srcdir_root srcdir alias package_root srcdir def curr_srcdir "#{@srcdir}/#{@currdir}" end attr_reader :objdir alias objdir_root objdir def curr_objdir "#{@objdir}/#{@currdir}" end def srcfile( path ) curr_srcdir + '/' + path end def srcexist?( path ) File.exist? srcfile(path) end def srcdirectory?( path ) dir? srcfile(path) end def srcfile?( path ) File.file? srcfile(path) end def srcentries( path = '.' ) Dir.open(curr_srcdir + '/' + path) {|d| return d.to_a - %w(. ..) - hookfilenames } end def srcfiles( path = '.' ) srcentries(path).find_all {|fname| File.file? File.join(curr_srcdir, path, fname) } end def srcdirectories( path = '.' ) srcentries(path).find_all {|fname| dir? File.join(curr_srcdir, path, fname) } end def dive_into( rel ) return unless dir? "#{@srcdir}/#{rel}" dir = File.basename(rel) Dir.mkdir dir unless dir? dir save = Dir.pwd Dir.chdir dir $stderr.puts '---> ' + rel if verbose? @currdir = rel yield Dir.chdir save $stderr.puts '<--- ' + rel if verbose? @currdir = File.dirname(rel) end # # config # def exec_config exec_task_traverse 'config' end def config_dir_bin( rel ) end def config_dir_lib( rel ) end def config_dir_ext( rel ) extconf if extdir? curr_srcdir end def extconf opt = @options['config-opt'].join(' ') command "#{config('ruby-prog')} #{curr_srcdir}/extconf.rb #{opt}" end def config_dir_data( rel ) end # # setup # def exec_setup exec_task_traverse 'setup' end def setup_dir_bin( relpath ) all_files(curr_srcdir).each do |fname| add_rubypath "#{curr_srcdir}/#{fname}" end end SHEBANG_RE = /\A\#!\s*\S*ruby\S*/ def add_rubypath( path ) $stderr.puts %Q if verbose? return if no_harm? 
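# Rewrites the "#!" line of an installed script: the file is copied to a
# temporary file with any "#!...ruby" shebang replaced by the configured
# 'ruby-path', then the temporary file is moved back over the original.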
tmpfile = File.basename(path) + '.tmp' begin File.open(path) {|r| File.open(tmpfile, 'w') {|w| first = r.gets return unless SHEBANG_RE === first # reject '/usr/bin/env ruby' w.print first.sub(SHEBANG_RE, '#!' + config('ruby-path')) w.write r.read } } mv tmpfile, File.basename(path) ensure rm_f tmpfile if File.exist? tmpfile end end def setup_dir_lib( relpath ) end def setup_dir_ext( relpath ) make if extdir?(curr_srcdir) end def make command config('make-prog') end def setup_dir_data( relpath ) end # # install # def exec_install exec_task_traverse 'install' end def install_dir_bin( rel ) install_files targfiles, config('bin-dir') + '/' + rel, 0755 end def install_dir_lib( rel ) install_files targfiles, config('rb-dir') + '/' + rel, 0644 end def install_dir_ext( rel ) install_dir_ext_main File.dirname(rel) if extdir?(curr_srcdir) end def install_dir_ext_main( rel ) install_files allext('.'), config('so-dir') + '/' + rel, 0555 end def install_dir_data( rel ) install_files targfiles, config('data-dir') + '/' + rel, 0644 end def install_files( list, dest, mode ) mkdir_p dest, @options['install-prefix'] list.each do |fname| install fname, dest, mode, @options['install-prefix'] end end def targfiles (targfilenames() - hookfilenames()).collect {|fname| File.exist?(fname) ? fname : File.join(curr_srcdir(), fname) } end def targfilenames [ curr_srcdir(), '.' ].inject([]) {|ret, dir| ret | all_files(dir) } end def hookfilenames %w( pre-%s post-%s pre-%s.rb post-%s.rb ).collect {|fmt| %w( config setup install clean ).collect {|t| sprintf fmt, t } }.flatten end def allext( dir ) _allext(dir) or raise InstallError, "no extention exists: Have you done 'ruby #{$0} setup' ?" end DLEXT = /\.#{ ::Config::CONFIG['DLEXT'] }\z/ def _allext( dir ) Dir.open(dir) {|d| return d.find_all {|fname| DLEXT === fname } } end # # clean # def exec_clean exec_task_traverse 'clean' rm_f 'config.save' rm_f 'InstalledFiles' end def clean_dir_bin( rel ) end def clean_dir_lib( rel ) end def clean_dir_ext( rel ) clean end def clean command config('make-prog') + ' clean' if File.file? 'Makefile' end def clean_dir_data( rel ) end # # lib # def exec_task_traverse( task ) run_hook 'pre-' + task FILETYPES.each do |type| if config('without-ext') == 'yes' and type == 'ext' $stderr.puts 'skipping ext/* by user option' if verbose? next end traverse task, type, task + '_dir_' + type end run_hook 'post-' + task end def traverse( task, rel, mid ) dive_into(rel) { run_hook 'pre-' + task __send__ mid, rel.sub(%r_\A.*?(?:/|\z)_, '') all_dirs(curr_srcdir).each do |d| traverse task, rel + '/' + d, mid end run_hook 'post-' + task } end def run_hook( name ) try_run_hook curr_srcdir + '/' + name or try_run_hook curr_srcdir + '/' + name + '.rb' end def try_run_hook( fname ) return false unless File.file? fname env = self.dup begin env.instance_eval File.read_all(fname), fname, 1 rescue raise InstallError, "hook #{fname} failed:\n" + $!.message end true end def extdir?( dir ) File.exist? dir + '/MANIFEST' end end ### end base.rb ### begin toplevel.rb class ToplevelInstaller < Installer TASKS = [ [ 'config', 'saves your configurations' ], [ 'show', 'shows current configuration' ], [ 'setup', 'compiles extention or else' ], [ 'install', 'installs files' ], [ 'clean', "does `make clean' for each extention" ] ] def initialize( root ) super nil, {'verbose' => true}, root, '.' 
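# The configuration table itself is created (or loaded from config.save)
# later, in #execute; here the toplevel installer only defaults to verbose
# output, takes the given root as the source tree and '.' as the build root.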
Installer.declear_toplevel_installer self end def execute run_metaconfigs case task = parsearg_global() when 'config' @config = ConfigTable.new else @config = ConfigTable.load end parsearg_TASK task exectask task end def run_metaconfigs MetaConfigEnvironment.eval_file "#{srcdir_root()}/#{metaconfig()}" end def metaconfig 'metaconfig' end def exectask( task ) if task == 'show' exec_show else try task end end def try( task ) $stderr.printf "#{File.basename $0}: entering %s phase...\n", task if verbose? begin __send__ 'exec_' + task rescue $stderr.printf "%s failed\n", task raise end $stderr.printf "#{File.basename $0}: %s done.\n", task if verbose? end # # processing arguments # def parsearg_global task_re = /\A(?:#{TASKS.collect {|i| i[0] }.join '|'})\z/ while arg = ARGV.shift case arg when /\A\w+\z/ task_re === arg or raise InstallError, "wrong task: #{arg}" return arg when '-q', '--quiet' @options['verbose'] = false when '--verbose' @options['verbose'] = true when '-h', '--help' print_usage $stdout exit 0 when '-v', '--version' puts "#{File.basename $0} version #{Version}" exit 0 when '--copyright' puts Copyright exit 0 else raise InstallError, "unknown global option '#{arg}'" end end raise InstallError, "No task or global option given. Typical installation procedure is: $ ruby #{File.basename $0} config $ ruby #{File.basename $0} setup # ruby #{File.basename $0} install (may require root privilege) " end def parsearg_TASK( task ) mid = "parsearg_#{task}" if respond_to? mid, true __send__ mid else ARGV.empty? or raise InstallError, "#{task}: unknown options: #{ARGV.join ' '}" end end def parsearg_config re = /\A--(#{ConfigTable.keys.join '|'})(?:=(.*))?\z/ @options['config-opt'] = [] while i = ARGV.shift if /\A--?\z/ === i @options['config-opt'] = ARGV.dup break end m = re.match(i) or raise InstallError, "config: unknown option #{i}" name, value = m.to_a[1,2] if value if ConfigTable.bool_config?(name) /\A(y(es)?|n(o)?|t(rue)?|f(alse))\z/i === value or raise InstallError, "config: --#{name} allows only yes/no for argument" value = (/\Ay(es)?|\At(rue)/i === value) ? 
'yes' : 'no' end else ConfigTable.bool_config?(name) or raise InstallError, "config: --#{name} requires argument" value = 'yes' end @config[name] = value end end def parsearg_install @options['no-harm'] = false @options['install-prefix'] = '' while a = ARGV.shift case a when /\A--no-harm\z/ @options['no-harm'] = true when /\A--prefix=(.*)\z/ path = $1 path = File.expand_path(path) unless path[0,1] == '/' @options['install-prefix'] = path else raise InstallError, "install: unknown option #{a}" end end end def print_usage( out ) out.puts 'Typical Installation Procedure:' out.puts " $ ruby #{File.basename $0} config" out.puts " $ ruby #{File.basename $0} setup" out.puts " # ruby #{File.basename $0} install (may require root privilege)" out.puts out.puts 'Detailed Usage:' out.puts " ruby #{File.basename $0} " out.puts " ruby #{File.basename $0} [] []" fmt = " %-20s %s\n" out.puts out.puts 'Global options:' out.printf fmt, '-q,--quiet', 'suppress message outputs' out.printf fmt, ' --verbose', 'output messages verbosely' out.printf fmt, '-h,--help', 'print this message' out.printf fmt, '-v,--version', 'print version and quit' out.printf fmt, ' --copyright', 'print copyright and quit' out.puts out.puts 'Tasks:' TASKS.each do |name, desc| out.printf " %-10s %s\n", name, desc end out.puts out.puts 'Options for config:' ConfigTable.each_definition do |name, (default, arg, desc, default2)| out.printf " %-20s %s [%s]\n", '--'+ name + (ConfigTable.bool_config?(name) ? '' : '='+arg), desc, default2 || default end out.printf " %-20s %s [%s]\n", '--rbconfig=path', 'your rbconfig.rb to load', "running ruby's" out.puts out.puts 'Options for install:' out.printf " %-20s %s [%s]\n", '--no-harm', 'only display what to do if given', 'off' out.printf " %-20s %s [%s]\n", '--prefix', 'install path prefix', '$prefix' out.puts end # # config # def exec_config super @config.save end # # show # def exec_show ConfigTable.each_name do |k| v = @config.get_raw(k) if not v or v.empty? v = '(not specified)' end printf "%-10s %s\n", k, v end end end ### end toplevel.rb if $0 == __FILE__ begin installer = ToplevelInstaller.new(File.dirname($0)) installer.execute rescue raise if $DEBUG $stderr.puts $!.message $stderr.puts "Try 'ruby #{$0} --help' for detailed usage." exit 1 end end ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/CHANGELOG0000644000000000000000000002400511672453175023056 0ustar rootroot--- %YAML:1.0 - version: 0.60 date: 2003-06-05 changes: - New parser backed by Syck. [http://whytheluckystiff.net/syck/] 100x faster. - New YAML::DBM module. - version: 0.50 date: 2003-01-13 changes: - YAML::Store more like PStore fixes. Thank you, Eric Hodel. - Bug fix to transfer methods, anchors, aliases in a seq-map shortcut. Thank you, Brad Hilton. - Prototype YPath support nearly complete (still need to handle '..'). - New YAML::YamlNode#select! method will transform the results of a selection. - No more '+' and '-' boolean implicits. - Sequence-in-sequence shortcut nailed. - Output YAML repaired for YTS. - Radical 0.6 + YAML patches. - version: 0.49 date: 2003-01-12 changes: - New, cleaner, more efficient tokenizer. Three months worth of work, kids. - Line numbers now reported on errors. - New map-seq shortcut. - New collection types. !omap, !pairs, !set. - New implicits for !null, !float, and !boolean. - Merge token changed to '<<'. - Added sexagecimal format (for time and degrees). - Removed the 'undef' possibility. - Fixed implicit empties in inline sequences. - Flow values allowed in their own indentation. 
- Parsing plain scalars better than ever. - version: 0.48 date: 2003-01-04 changes: - Empty string implicit. - New merge type rules! See http://www.yaml.org/type/merge/. - URL encoding removed, replaced with YAML escaping. - Binary type now does base64 exclusively. - Removed parentheses syntax from !null, !bool and !float implicits. - Fixes to the seq-map shortcut, addition of the new map-seq shortcut! - Enhanced !okay/rpc's server.about return. - The install script's --force command was removed. - Directives now in %KEY:VALUE format. - version: 0.47 date: 2002-11-11 changes: - New IO via StringIO and IO#readline. A change from 0.50 that's being merged in now for speed. - Trailing document separator bugfix. - DomainType class for handling undefined type families. - YAML::each_document alias for YAML::load_documents - YAML::each_node alias for YAML::parse_documents - version: 0.46 date: 2002-11-01 changes: - RubyConf fluf release. - In other words, no real changes. - I'm talking about total jubilation. - version: 0.45 date: 2002-10-31 changes: - Yod generates PDF (requires Ruby-ClibPDF). - Speed improvement of 20-30% in yaml.rb parser. - Support for Okay modules. - Long single-line strings will be folded if emitter has folding on. - version: 0.44 date: 2002-10-10 changes: - New !okay/news and !okay/rpc specifications. - Racc no longer a run YAML. - version: 0.43 date: 2002-09-17 changes: - Strings now anchored only if the string contains a new line. - Prototype for the !okay/type schema validator now included. - Typing and implicit checking system now wired to YAML::transfer_method. - version: 0.42 date: 2002-09-16 changes: - Timestamp usec fixes galore from Michael Witrant. - New API with parse_* and load_* for accessing the generic and native models, respectively. - Basic YPath support checked in. - version: 0.41 date: 2002-09-06 changes: - New taguri typing mechanism (from Last Call spec). - The '/' character allowed to start unquoted strings (from Last Call spec). - Added YAML::Pairs type which capitalizes on the seq-map shortcut in the spec. - Repaired cases in which nested inline collections were screwing things up. - Emitter now emits the seq-map shortcut when possible. - Custom classes which extend from Array and Object should round-trip. - version: 0.40 date: 2002-08-28 changes: - Added Emitter#map and Emitter#seq, along with YAML::quick_emit to ease writing to_yaml methods. - Fixes to headless documents. - Fix to PrivateType. - Fix to empty classes extended from Array and Object. - New reference checker uses anchors and aliases to prevent circular references. - Binary data now emitting. - Kernel::y method for dumping data as an alternative to Kernel::p. - version: 0.39 date: 2002-08-15 changes: - 'Improved multi-line scalar emissions. Beware of Iconv module with large text blocks. It appears to be cutting off after a few k.' - Problem with space indicators emitted by String#to_yaml. Bug reported by Tom Sawyer. - Bug in comment-handling code. YAML couldn't parse the README. - Yod now generating compilable CHM. - version: 0.38 date: 2002-08-12 changes: - Moved the Emitter into its own class. Thread-safe now. - Added some basic Unicode support. - More progress on Yod. - version: 0.37 date: 2002-08-04 changes: - Finished descriptions in the Cookbook. - Added simpler inline Regexp and Range serialization. - version: 0.36 date: 2002-08-01 changes: - Renamed YAML.rb. - Fixed slight inconsistency with space indicators in plain scalars. 
- version: 0.35 date: 2002-07-29 changes: - Range#to_yaml added. - Space indicators now required as the spec prescribes (http://yaml.org/spec/#space_indicators). - Negative numbers parsing fixed. - version: 0.34 date: 2002-07-29 changes: - Symbol#to_yaml added with abbreviated !ruby/sym syntax. - Problems with NaN, Infinity, Floats emission. - Complex keys, Symbol keys, Regexp keys bugfixes. - Bug in Time#to_yaml. My bad. - version: 0.33 date: 2002-07-29 changes: - Better String#to_yaml, uses the parser's implicit type checker now! - Headless documents now skipping the initial newline properly. - Turned off SortKeys by default; still turned on in the tests. - version: 0.32 date: 2002-07-28 changes: - Opened up unquoted strings to more characters, is closer to spec now. - Round tripping started with YTS. - Problems with foreign characters fixed. Thanks, Robert Wagner. - Problems with Array#to_yaml and Hash#to_yaml not giving a newline when nested. Again, RW. - Bugs in positive timezones with Time#to_yaml fixed by Tobias Peters! - version: 0.31 date: 2002-07-27 changes: - Fixed bug in multiline quoted strings. - Added YTS cases for odd newline and spaced block scalars. - version: 0.30 date: 2002-07-26 changes: - Added support for Steve's new YTS streams. - Refactored newline handling across all blocks. - version: 0.29 date: 2002-07-24 changes: - First stab at Unicode support. - Basic tests fixed. - version: 0.28 date: 2002-07-24 changes: - Special comment key and default key supported. - Multiline unquoted supported in in-line constructs. - version: 0.27 date: 2002-07-24 changes: - Handling of literal and folded blocks finally matches the spec for indented blocks. - All indentation should be handled correctly. - Indented comments properly handled. - version: 0.26 date: 2002-07-23 changes: - Fixed Timestamps to handle single character usec. - Complete spec now in YTS. - version: 0.25 date: 2002-07-21 changes: - New Object#to_yaml provides a good generic export for most Ruby objects. - Fixes to the TrueClass, FalseClass, NilClass export methods. - Test harness for the YTS (YamlTestingSuite). - Distribution now includes yaml4r.rb directly, use --force to remake. - version: 0.24 date: 2002-07-17 changes: - Started CHM output for Yod. - Better support for multiline scalars, multiline double-quoted strings, and multiline single-quoted strings. - Cleaned up the parser code, its indentation and comments. - version: 0.23 date: 2002-07-16 changes: - Major fixes to the Time emitter and parser. Time zones should work. - Fixes to indentation at the start of a document. - More Yod work. - version: 0.22 date: 2002-07-16 changes: - Started Yod documentation. - Working to fix indentation problems. - version: 0.21 date: 2002-07-15 changes: - Private types now available via add_private_type or returns a PrivateType class. - URI escaping. - All ruby types now registered under the ruby.yaml.org domain as the spec describes. - Fixed double-quoted string ending on last character of the document. - version: 0.20 date: 2002-07-14 changes: - Better error reporting from the parser. Shows the sensitive area and sometimes a bit of advice. - Binary data builtin now suported for strings and scalar blocks. - Spanning quoted strings should be good. - Pause marker supported. - version: 0.19 date: 2002-07-12 changes: - Parsing of multiple documents via YAML4R::load_document and Parser#parse_documents. - version: 0.18 date: 2002-07-11 changes: - New YAML4R::add_domain_type, YAML4R::add_ruby_type, YAML4R::add_builtin_type! 
Beginning of an era! - Transfer method prefixing. - Better support for commas, colons in unquoted strings. - All supported types now added with add_builtin_type. - Separators in scalars was throwing the parser off. - Seperated unit tests into smaller, more specific functions. - README and CHANGELOG now fully parsed by YAML4R. - The install.rb now has a --force option to force Racc to remake the grammar. - version: 0.17 date: 2002-07-10 changes: - Implicit Time elements added (without timezones). - Folded and literal blocks alot closer. - Spanned strings making progress. - Struct fully round-trip. - version: 0.16 date: 2002-07-09 changes: - Nearing completion of scalar blocks, literal and folded both implemented. - Improvements to the Struct class round-trip. - Adjunct words are combined to prevent odd errors. - version: 0.15 date: 2002-07-09 changes: - Added ! explicit implicit operator. - Added Struct#to_yaml. - Fixes to double-quoted strings. - Fixes to Regexp#to_yaml. Added unit tests to reflect. - version: 0.14 date: 2002-07-08 changes: - Initial public release. - README in YAML. - Complex keys. - Single-quoted, double-quoted scalars. - Transfer methods now parsed. - Cleaned up the grammar a bit more. - Anchors and aliases. ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/lib/0000755000000000000000000000000011672453175022411 5ustar rootrootruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/lib/yaml/0000755000000000000000000000000011672453175023353 5ustar rootrootruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/lib/yaml/emitter.rb0000644000000000000000000000413411672453175025353 0ustar rootroot# # Output classes and methods # require 'yaml/baseemitter' require 'yaml/encoding' module YAML # # Emit a set of values # class Emitter include BaseEmitter attr_accessor :options def initialize( opts ) opts = {} if opts.class != Hash @options = YAML::DEFAULTS.dup.update( opts ) @headless = 0 @seq_map = false @anchors = {} @anchor_extras = {} @active_anchors = [] @level = -1 self.clear end def clear @buffer = [] end def level @level end # # Version string # def version_s " %YAML:#{@options[:Version]}" if @options[:UseVersion] end # # Header # def header if @headless.nonzero? "" else "---#{version_s} " end end # # Concatenate to the buffer # def <<( str ) #p [ self.id, @level, str ] @buffer.last << str end # # Monitor objects and allow references # def start_object( oid ) @level += 1 @buffer.push( "" ) #p [ self.id, @level, :OPEN ] idx = nil if oid if @anchors.has_key?( oid ) idx = @active_anchors.index( oid ) unless idx idx = @active_anchors.length af_str = "&#{@options[:AnchorFormat]} " % [ idx + 1 ] af_str += @anchor_extras[ @anchors[ oid ] ].to_s @buffer[ @anchors[ oid ] ][0,0] = af_str @headless = 0 if @anchors[ oid ].zero? 
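# Second and later occurrences of an object get an anchor: the "&idNNN "
# string is spliced into the buffered fragment where the object was first
# emitted, and the index returned below lets the caller refer back to that
# anchor with an alias instead of re-serializing the value.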
end idx += 1 @active_anchors.push( oid ) else @anchors[ oid ] = @buffer.length - 1 end end return idx end # # Output method # def end_object @level -= 1 @buffer.push( "" ) #p [ self.id, @level, :END ] if @level < 0 header + @buffer.to_s[@headless..-1].to_s end end end end ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/lib/yaml/types.rb0000644000000000000000000001115411672453175025046 0ustar rootroot# # Classes required by the full core typeset # require 'yaml/compat' module YAML # # Default private type # class PrivateType def self.tag_subclasses?; false; end attr_accessor :type_id, :value def initialize( type, val ) @type_id = type; @value = val @value.taguri = "x-private:#{ @type_id }" end def to_yaml( opts = {} ) @value.to_yaml( opts ) end end # # Default domain type # class DomainType def self.tag_subclasses?; false; end attr_accessor :domain, :type_id, :value def initialize( domain, type, val ) @domain = domain; @type_id = type; @value = val @value.taguri = "tag:#{ @domain }:#{ @type_id }" end def to_yaml( opts = {} ) @value.to_yaml( opts ) end end # # Unresolved objects # class Object def self.tag_subclasses?; false; end def to_yaml( opts = {} ) YAML::quick_emit( object_id, opts ) do |out| out.map( "tag:ruby.yaml.org,2002:object:#{ @class }", to_yaml_style ) do |map| @ivars.each do |k,v| map.add( k, v ) end end end end end # # YAML Hash class to support comments and defaults # class SpecialHash < ::Hash attr_accessor :default def inspect self.default.to_s end def to_s self.default.to_s end def update( h ) if YAML::SpecialHash === h @default = h.default if h.default end super( h ) end def to_yaml( opts = {} ) opts[:DefaultKey] = self.default super( opts ) end end # # Builtin collection: !omap # class Omap < ::Array tag_as "tag:yaml.org,2002:omap" def yaml_initialize( tag, val ) if Array === val val.each do |v| if Hash === v concat( v.to_a ) # Convert the map to a sequence else raise YAML::Error, "Invalid !omap entry: " + val.inspect end end else raise YAML::Error, "Invalid !omap: " + val.inspect end self end def self.[]( *vals ) o = Omap.new 0.step( vals.length - 1, 2 ) do |i| o[vals[i]] = vals[i+1] end o end def []( k ) self.assoc( k ).to_a[1] end def []=( k, *rest ) val, set = rest.reverse if ( tmp = self.assoc( k ) ) and not set tmp[1] = val else self << [ k, val ] end val end def has_key?( k ) self.assoc( k ) ? true : false end def is_complex_yaml? true end def to_yaml( opts = {} ) YAML::quick_emit( self.object_id, opts ) do |out| out.seq( taguri, to_yaml_style ) do |seq| self.each do |v| seq.add( Hash[ *v ] ) end end end end end # # Builtin collection: !pairs # class Pairs < ::Array tag_as "tag:yaml.org,2002:pairs" def yaml_initialize( tag, val ) if Array === val val.each do |v| if Hash === v concat( v.to_a ) # Convert the map to a sequence else raise YAML::Error, "Invalid !pairs entry: " + val.inspect end end else raise YAML::Error, "Invalid !pairs: " + val.inspect end self end def self.[]( *vals ) p = Pairs.new 0.step( vals.length - 1, 2 ) { |i| p[vals[i]] = vals[i+1] } p end def []( k ) self.assoc( k ).to_a end def []=( k, val ) self << [ k, val ] val end def has_key?( k ) self.assoc( k ) ? true : false end def is_complex_yaml? 
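# Pairs always report themselves as complex YAML, so when one is used as a
# mapping key the emitter switches to the explicit "? " key form (see
# BaseEmitter#map in baseemitter.rb).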
true end def to_yaml( opts = {} ) YAML::quick_emit( self.object_id, opts ) do |out| out.seq( taguri, to_yaml_style ) do |seq| self.each do |v| seq.add( Hash[ *v ] ) end end end end end # # Builtin collection: !set # class Set < ::Hash tag_as "tag:yaml.org,2002:set" end end ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/lib/yaml/yamlnode.rb0000644000000000000000000000230011672453175025503 0ustar rootroot# # YAML::YamlNode class # require 'yaml/basenode' module YAML # # YAML Generic Model container # class YamlNode include BaseNode attr_accessor :kind, :type_id, :value, :anchor def initialize( t, v ) @type_id = t if Hash === v @kind = 'map' @value = {} v.each { |k,v| @value[ k.transform ] = [ k, v ] } elsif Array === v @kind = 'seq' @value = v elsif String === v @kind = 'scalar' @value = v end end # # Transform this node fully into a native type # def transform t = nil if @value.is_a? Hash t = {} @value.each { |k,v| t[ k ] = v[1].transform } elsif @value.is_a? Array t = [] @value.each { |v| t.push v.transform } else t = @value end YAML.transfer_method( @type_id, t ) end end end ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/lib/yaml/stream.rb0000644000000000000000000000127411672453175025177 0ustar rootrootmodule YAML # # YAML::Stream -- for emitting many documents # class Stream attr_accessor :documents, :options def initialize( opts = {} ) @options = opts @documents = [] end def []( i ) @documents[ i ] end def add( doc ) @documents << doc end def edit( doc_num, doc ) @documents[ doc_num ] = doc end def emit( io = nil ) # opts = @options.dup # opts[:UseHeader] = true if @documents.length > 1 out = YAML.emitter out.reset( io || io2 = StringIO.new ) @documents.each { |v| v.to_yaml( out ) } io || ( io2.rewind; io2.read ) end end end ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/lib/yaml/rubytypes.rb0000644000000000000000000003132511672453175025752 0ustar rootroot# -*- mode: ruby; ruby-indent-level: 4; tab-width: 4 -*- vim: sw=4 ts=4 require 'date' require 'yaml/compat' # # Type conversions # class Class def to_yaml( opts = {} ) raise TypeError, "can't dump anonymous class %s" % self.class end end class Object tag_as "tag:ruby.yaml.org,2002:object" def to_yaml_style; end def to_yaml_properties; instance_variables.sort; end def to_yaml( opts = {} ) YAML::quick_emit( object_id, opts ) do |out| out.map( taguri, to_yaml_style ) do |map| to_yaml_properties.each do |m| map.add( m[1..-1], instance_variable_get( m ) ) end end end end end # # Maps: Hash#to_yaml # class Hash tag_as "tag:ruby.yaml.org,2002:hash" tag_as "tag:yaml.org,2002:map" def yaml_initialize( tag, val ) if Array === val update Hash.[]( *val ) # Convert the map to a sequence elsif Hash === val update val else raise YAML::TypeError, "Invalid map explicitly tagged #{ tag }: " + val.inspect end end def to_yaml( opts = {} ) YAML::quick_emit( object_id, opts ) do |out| out.map( taguri, to_yaml_style ) do |map| each do |k, v| map.add( k, v ) end end end end end # # Structs: export as a !map # class Struct tag_as "tag:ruby.yaml.org,2002:struct" def self.tag_class_name; self.name.gsub( "Struct::", "" ); end def self.tag_read_class( name ); "Struct::#{ name }"; end def self.yaml_new( klass, tag, val ) if Hash === val struct_type = nil # # Use existing Struct if it exists # props = {} val.delete_if { |k,v| props[k] = v if k =~ /^@/ } begin struct_name, struct_type = YAML.read_type_class( tag, Struct ) rescue NameError end if not struct_type struct_def = [ tag.split( ':', 4 ).last ] struct_type = Struct.new( 
*struct_def.concat( val.keys.collect { |k| k.intern } ) ) end # # Set the Struct properties # st = YAML::object_maker( struct_type, {} ) st.members.each do |m| st.send( "#{m}=", val[m] ) end props.each do |k,v| st.instance_variable_set(k, v) end st else raise YAML::TypeError, "Invalid Ruby Struct: " + val.inspect end end def to_yaml( opts = {} ) YAML::quick_emit( object_id, opts ) do |out| # # Basic struct is passed as a YAML map # out.map( taguri, to_yaml_style ) do |map| self.members.each do |m| map.add( m, self[m] ) end self.to_yaml_properties.each do |m| map.add( m, instance_variable_get( m ) ) end end end end end # # Sequences: Array#to_yaml # class Array tag_as "tag:ruby.yaml.org,2002:array" tag_as "tag:yaml.org,2002:seq" def yaml_initialize( tag, val ); concat( val.to_a ); end def to_yaml( opts = {} ) YAML::quick_emit( object_id, opts ) do |out| out.seq( taguri, to_yaml_style ) do |seq| each do |x| seq.add( x ) end end end end end # # Exception#to_yaml # class Exception tag_as "tag:ruby.yaml.org,2002:exception" def Exception.yaml_new( klass, tag, val ) o = YAML.object_maker( klass, { 'mesg' => val.delete( 'message' ) } ) val.each_pair do |k,v| o.instance_variable_set("@#{k}", v) end o end def to_yaml( opts = {} ) YAML::quick_emit( object_id, opts ) do |out| out.map( taguri, to_yaml_style ) do |map| map.add( 'message', message ) to_yaml_properties.each do |m| map.add( m[1..-1], instance_variable_get( m ) ) end end end end end # # String#to_yaml # class String tag_as "tag:ruby.yaml.org,2002:string" tag_as "tag:yaml.org,2002:str" def is_complex_yaml? to_yaml_style or not to_yaml_properties.empty? or self =~ /\n.+/ end def is_binary_data? ( self.count( "^ -~", "^\r\n" ) / self.size > 0.3 || self.count( "\x00" ) > 0 ) end def String.yaml_new( klass, tag, val ) val = { 'str' => val } if String === val if Hash === val s = klass.allocate # Thank you, NaHi String.instance_method(:initialize). bind(s). call( val.delete( 'str' ) ) val.each { |k,v| s.instance_variable_set( k, v ) } s else raise YAML::TypeError, "Invalid String: " + val.inspect end end def to_yaml( opts = {} ) YAML::quick_emit( is_complex_yaml? ? object_id : nil, opts ) do |out| if to_yaml_properties.empty? out.scalar( taguri, self, to_yaml_style ) else out.map( taguri, to_yaml_style ) do |map| map.add( 'str', "#{self}" ) to_yaml_properties.each do |m| map.add( m, instance_variable_get( m ) ) end end end end end end # # Symbol#to_yaml # class Symbol tag_as "tag:ruby.yaml.org,2002:symbol" tag_as "tag:ruby.yaml.org,2002:sym" # yaml_implicit /^:/, :yaml_new def Symbol.yaml_new( klass, tag, val ) if String === val val.intern else raise YAML::TypeError, "Invalid Symbol: " + val.inspect end end def to_yaml( opts = {} ) YAML::quick_emit( nil, opts ) do |out| out.scalar( taguri, self.id2name, :plain ) end end end # # Range#to_yaml # TODO: Rework the Range as a sequence (simpler) # class Range tag_as "tag:ruby.yaml.org,2002:range" def Range.yaml_new( klass, tag, val ) inr = %r'(\w+|[+-]?\d+(?:\.\d+)?(?:e[+-]\d+)?|"(?:[^\\"]|\\.)*")' opts = {} if String === val and val =~ /^#{inr}(\.{2,3})#{inr}$/o r1, rdots, r2 = $1, $2, $3 opts = { 'begin' => YAML.load( "--- #{r1}" ), 'end' => YAML.load( "--- #{r2}" ), 'excl' => rdots.length == 3 } val = {} elsif Hash === val opts['begin'] = val.delete('begin') opts['end'] = val.delete('end') opts['excl'] = val.delete('excl') end if Hash === opts r = YAML::object_maker( klass, {} ) # Thank you, NaHi Range.instance_method(:initialize). bind(r). 
call( opts['begin'], opts['end'], opts['excl'] ) val.each { |k,v| r.instance_variable_set( k, v ) } r else raise YAML::TypeError, "Invalid Range: " + val.inspect end end def to_yaml( opts = {} ) YAML::quick_emit( object_id, opts ) do |out| # if self.begin.is_complex_yaml? or self.begin.respond_to? :to_str or # self.end.is_complex_yaml? or self.end.respond_to? :to_str or # not to_yaml_properties.empty? out.map( taguri, to_yaml_style ) do |map| map.add( 'begin', self.begin ) map.add( 'end', self.end ) map.add( 'excl', self.exclude_end? ) to_yaml_properties.each do |m| map.add( m, instance_variable_get( m ) ) end end # else # out.scalar( taguri ) do |sc| # sc.embed( self.begin ) # sc.concat( self.exclude_end? ? "..." : ".." ) # sc.embed( self.end ) # end # end end end end # # Make an Regexp # class Regexp tag_as "tag:ruby.yaml.org,2002:regexp" def Regexp.yaml_new( klass, tag, val ) if String === val and val =~ /^\/(.*)\/([mix]*)$/ val = { 'regexp' => $1, 'mods' => $2 } end if Hash === val mods = nil unless val['mods'].to_s.empty? mods = 0x00 mods |= Regexp::EXTENDED if val['mods'].include?( 'x' ) mods |= Regexp::IGNORECASE if val['mods'].include?( 'i' ) mods |= Regexp::MULTILINE if val['mods'].include?( 'm' ) end val.delete( 'mods' ) r = YAML::object_maker( klass, {} ) Regexp.instance_method(:initialize). bind(r). call( val.delete( 'regexp' ), mods ) val.each { |k,v| r.instance_variable_set( k, v ) } r else raise YAML::TypeError, "Invalid Regular expression: " + val.inspect end end def to_yaml( opts = {} ) YAML::quick_emit( nil, opts ) do |out| if to_yaml_properties.empty? out.scalar( taguri, self.inspect, :plain ) else out.map( taguri, to_yaml_style ) do |map| src = self.inspect if src =~ /\A\/(.*)\/([a-z]*)\Z/ map.add( 'regexp', $1 ) map.add( 'mods', $2 ) else raise YAML::TypeError, "Invalid Regular expression: " + src end to_yaml_properties.each do |m| map.add( m, instance_variable_get( m ) ) end end end end end end # # Emit a Time object as an ISO 8601 timestamp # class Time tag_as "tag:ruby.yaml.org,2002:time" tag_as "tag:yaml.org,2002:timestamp" def Time.yaml_new( klass, tag, val ) if Hash === val t = val.delete( 'at' ) val.each { |k,v| t.instance_variable_set( k, v ) } t else raise YAML::TypeError, "Invalid Time: " + val.inspect end end def to_yaml( opts = {} ) YAML::quick_emit( object_id, opts ) do |out| tz = "Z" # from the tidy Tobias Peters Thanks! unless self.utc? utc_same_instant = self.dup.utc utc_same_writing = Time.utc(year,month,day,hour,min,sec,usec) difference_to_utc = utc_same_writing - utc_same_instant if (difference_to_utc < 0) difference_sign = '-' absolute_difference = -difference_to_utc else difference_sign = '+' absolute_difference = difference_to_utc end difference_minutes = (absolute_difference/60).round tz = "%s%02d:%02d" % [ difference_sign, difference_minutes / 60, difference_minutes % 60] end standard = self.strftime( "%Y-%m-%d %H:%M:%S" ) standard += ".%06d" % [usec] if usec.nonzero? standard += " %s" % [tz] if to_yaml_properties.empty? 
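# No extra instance variables: emit the timestamp as a plain scalar.
# Otherwise fall through to the map form, with the timestamp stored under
# 'at' followed by each remaining instance variable.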
out.scalar( taguri, standard, :plain ) else out.map( taguri, to_yaml_style ) do |map| map.add( 'at', standard ) to_yaml_properties.each do |m| map.add( m, instance_variable_get( m ) ) end end end end end end # # Emit a Date object as a simple implicit # class Date tag_as "tag:yaml.org,2002:timestamp#ymd" def to_yaml( opts = {} ) YAML::quick_emit( object_id, opts ) do |out| out.scalar( "tag:yaml.org,2002:timestamp", self.to_s, :plain ) end end end # # Send Integer, Booleans, NilClass to String # class Numeric def to_yaml( opts = {} ) YAML::quick_emit( nil, opts ) do |out| str = self.to_s if str == "Infinity" str = ".Inf" elsif str == "-Infinity" str = "-.Inf" elsif str == "NaN" str = ".NaN" end out.scalar( taguri, str, :plain ) end end end class Fixnum tag_as "tag:yaml.org,2002:int" end class Float tag_as "tag:yaml.org,2002:float" end class TrueClass tag_as "tag:yaml.org,2002:bool#yes" def to_yaml( opts = {} ) YAML::quick_emit( nil, opts ) do |out| out.scalar( taguri, "true", :plain ) end end end class FalseClass tag_as "tag:yaml.org,2002:bool#no" def to_yaml( opts = {} ) YAML::quick_emit( nil, opts ) do |out| out.scalar( taguri, "false", :plain ) end end end class NilClass tag_as "tag:yaml.org,2002:null" def to_yaml( opts = {} ) YAML::quick_emit( nil, opts ) do |out| out.scalar( taguri, "", :plain ) end end end ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/lib/yaml/tag.rb0000644000000000000000000000567311672453175024466 0ustar rootroot# -*- mode: ruby; ruby-indent-level: 4; tab-width: 4 -*- vim: sw=4 ts=4 # $Id: tag.rb,v 1.6 2005/04/07 16:22:02 why Exp $ # # = yaml/tag.rb: methods for associating a taguri to a class. # # Author:: why the lucky stiff # module YAML # A dictionary of taguris which map to # Ruby classes. @@tagged_classes = {} # # Associates a taguri _tag_ with a Ruby class _cls_. The taguri is used to give types # to classes when loading YAML. Taguris are of the form: # # tag:authorityName,date:specific # # The +authorityName+ is a domain name or email address. The +date+ is the date the type # was issued in YYYY or YYYY-MM or YYYY-MM-DD format. The +specific+ is a name for # the type being added. # # For example, built-in YAML types have 'yaml.org' as the +authorityName+ and '2002' as the # +date+. The +specific+ is simply the name of the type: # # tag:yaml.org,2002:int # tag:yaml.org,2002:float # tag:yaml.org,2002:timestamp # # The domain must be owned by you on the +date+ declared. If you don't own any domains on the # date you declare the type, you can simply use an e-mail address. # # tag:why@ruby-lang.org,2004:notes/personal # def YAML.tag_class( tag, cls ) if @@tagged_classes.has_key? tag warn "class #{ @@tagged_classes[tag] } held ownership of the #{ tag } tag" end @@tagged_classes[tag] = cls end # Returns the complete dictionary of taguris, paired with classes. The key for # the dictionary is the full taguri. The value for each key is the class constant # associated to that taguri. # # YAML.tagged_classes["tag:yaml.org,2002:int"] => Integer # def YAML.tagged_classes @@tagged_classes end end class Module # :nodoc: all # Adds a taguri _tag_ to a class, used when dumping or loading the class # in YAML. See YAML::tag_class for detailed information on typing and # taguris. def tag_as( tag, sc = true ) class_eval <<-"end;" attr_accessor :taguri def taguri if respond_to? :to_yaml_type YAML::tagurize( to_yaml_type[1..-1] ) else return @taguri if @taguri tag = #{ tag.dump } if self.class.tag_subclasses? 
and self.class != YAML::tagged_classes[tag] tag = "\#{ tag }:\#{ self.class.tag_class_name }" end tag end end def self.tag_subclasses?; #{ sc ? 'true' : 'false' }; end end; YAML::tag_class tag, self end # Transforms the subclass name into a name suitable for display # in a subclassed tag. def tag_class_name self.name end # Transforms the subclass name found in the tag into a Ruby # constant name. def tag_read_class( name ) name end end ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/lib/yaml/stringio.rb0000644000000000000000000000366111672453175025544 0ustar rootroot# # Limited StringIO if no core lib is available # begin require 'stringio' rescue LoadError # StringIO based on code by MoonWolf class StringIO def initialize(string="") @string=string @pos=0 @eof=(string.size==0) end def pos @pos end def eof @eof end alias eof? eof def readline(rs=$/) if @eof raise EOFError else if p = @string[@pos..-1]=~rs line = @string[@pos,p+1] else line = @string[@pos..-1] end @pos+=line.size @eof =true if @pos==@string.size $_ = line end end def rewind seek(0,0) end def seek(offset,whence) case whence when 0 @pos=offset when 1 @pos+=offset when 2 @pos=@string.size+offset end @eof=(@pos>=@string.size) 0 end end # # Class method for creating streams # def YAML.make_stream( io ) if String === io io = StringIO.new( io ) elsif not IO === io raise YAML::Error, "YAML stream must be an IO or String object." end if YAML::unicode def io.readline YAML.utf_to_internal( readline( @ln_sep ), @utf_encoding ) end def io.check_unicode @utf_encoding = YAML.sniff_encoding( read( 4 ) ) @ln_sep = YAML.enc_separator( @utf_encoding ) seek( -4, IO::SEEK_CUR ) end def io.utf_encoding @utf_encoding end io.check_unicode else def io.utf_encoding :None end end io end end ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/lib/yaml/encoding.rb0000644000000000000000000000114411672453175025466 0ustar rootroot# # Handle Unicode-to-Internal conversion # module YAML # # Escape the string, condensing common escapes # def YAML.escape( value, skip = "" ) value.gsub( /\\/, "\\\\\\" ). gsub( /"/, "\\\"" ). gsub( /([\x00-\x1f])/ ) do |x| skip[x] || ESCAPES[ x.unpack("C")[0] ] end end # # Unescape the condenses escapes # def YAML.unescape( value ) value.gsub( /\\(?:([nevfbart\\])|0?x([0-9a-fA-F]{2})|u([0-9a-fA-F]{4}))/ ) { |x| if $3 ["#$3".hex ].pack('U*') elsif $2 [$2].pack( "H2" ) else UNESCAPES[$1] end } end end ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/lib/yaml/syck.rb0000644000000000000000000000041711672453175024653 0ustar rootroot# # YAML::Syck module # .. glues syck and yaml.rb together .. # require 'syck' require 'yaml/basenode' module YAML module Syck # # Mixin BaseNode functionality # class Node include YAML::BaseNode end end end ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/lib/yaml/constants.rb0000644000000000000000000000224211672453175025714 0ustar rootroot# # Constants used throughout the library # module YAML # # Constants # VERSION = '0.60' SUPPORTED_YAML_VERSIONS = ['1.0'] # # Parser tokens # WORD_CHAR = 'A-Za-z0-9' PRINTABLE_CHAR = '-_A-Za-z0-9!?/()$\'". ' NOT_PLAIN_CHAR = '\x7f\x0-\x1f\x80-\x9f' ESCAPE_CHAR = '[\\x00-\\x09\\x0b-\\x1f]' INDICATOR_CHAR = '*&!|\\\\^@%{}[]=' SPACE_INDICATORS = '-#:,?' RESTRICTED_INDICATORS = '#:,}]' DNS_COMP_RE = "\\w(?:[-\\w]*\\w)?" 
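# DNS_COMP_RE matches a single domain-name label; DNS_NAME_RE below chains
# one or more labels together with dots.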
DNS_NAME_RE = "(?:(?:#{DNS_COMP_RE}\\.)+#{DNS_COMP_RE}|#{DNS_COMP_RE})" ESCAPES = %w{\x00 \x01 \x02 \x03 \x04 \x05 \x06 \a \x08 \t \n \v \f \r \x0e \x0f \x10 \x11 \x12 \x13 \x14 \x15 \x16 \x17 \x18 \x19 \x1a \e \x1c \x1d \x1e \x1f } UNESCAPES = { 'a' => "\x07", 'b' => "\x08", 't' => "\x09", 'n' => "\x0a", 'v' => "\x0b", 'f' => "\x0c", 'r' => "\x0d", 'e' => "\x1b", '\\' => '\\', } # # Default settings # DEFAULTS = { :Indent => 2, :UseHeader => false, :UseVersion => false, :Version => '1.0', :SortKeys => false, :AnchorFormat => 'id%03d', :ExplicitTypes => false, :WidthType => 'absolute', :BestWidth => 80, :UseBlock => false, :UseFold => false, :Encoding => :None } end ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/lib/yaml/store.rb0000644000000000000000000000062511672453175025037 0ustar rootroot# # YAML::Store # require 'yaml' require 'pstore' class YAML::Store < PStore def initialize( *o ) @opt = YAML::DEFAULTS.dup if String === o.first super(o.shift) end if o.last.is_a? Hash @opt.update(o.pop) end end def dump(table) @table.to_yaml(@opt) end def load(content) YAML::load(content) end def load_file(file) YAML::load(file) end end ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/lib/yaml/compat.rb0000644000000000000000000000162111672453175025163 0ustar rootroot# # Ruby 1.6 -> 1.8 compatibility # (an isolated incident) # class Object; alias_method :object_id, :id; end unless Object.respond_to? :object_id class Object; def instance_variable_set(k, v); self.instance_eval "#{k} = v"; end; end \ unless Object.respond_to? :instance_variable_set class Object; def instance_variable_get(k); self.instance_eval "#{k}"; end; end \ unless Object.respond_to? :instance_variable_get unless Object.respond_to? :allocate class Object def allocate name = self.class.name if Marshal::const_defined? :MAJOR_VERSION ostr = sprintf( "%c%co:%c%s\000", Marshal::MAJOR_VERSION, Marshal::MINOR_VERSION, name.length + 5, name ) else ostr = sprintf( "\004\006o:%c%s\000", name.length + 5, name ) end ::Marshal.load( ostr ) end end end ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/lib/yaml/basenode.rb0000644000000000000000000001373311672453175025467 0ustar rootroot# # YAML::BaseNode class # require 'yaml/ypath' module YAML # # YAML Generic Model container # module BaseNode # # Search for YPath entry and return # qualified nodes. # def select( ypath_str ) matches = match_path( ypath_str ) # # Create a new generic view of the elements selected # if matches result = [] matches.each { |m| result.push m.last } YAML.transfer( 'seq', result ) end end # # Search for YPath entry and return # transformed nodes. # def select!( ypath_str ) matches = match_path( ypath_str ) # # Create a new generic view of the elements selected # if matches result = [] matches.each { |m| result.push m.last.transform } result end end # # Search for YPath entry and return a list of # qualified paths. # def search( ypath_str ) matches = match_path( ypath_str ) if matches matches.collect { |m| path = [] m.each_index { |i| path.push m[i] if ( i % 2 ).zero? 
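# A match array alternates key, node, key, node, ...; only the even slots
# (the keys/indexes) become segments of the reported "/"-joined path.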
} "/" + path.compact.join( "/" ) } end end def at( seg ) if Hash === @value and @value.has_key?( seg ) @value[seg][1] elsif Array === @value and seg =~ /\A\d+\Z/ and @value[seg.to_i] @value[seg.to_i] end end # # YPath search returning a complete depth array # def match_path( ypath_str ) depth = 0 matches = [] YPath.each_path( ypath_str ) do |ypath| seg = match_segment( ypath, 0 ) matches += seg if seg end matches.uniq end # # Search a node for a single YPath segment # def match_segment( ypath, depth ) deep_nodes = [] seg = ypath.segments[ depth ] if seg == "/" unless String === @value idx = -1 @value.collect { |v| idx += 1 if Hash === @value match_init = [v[0], v[1][1]] match_deep = v[1][1].match_segment( ypath, depth ) else match_init = [idx, v] match_deep = v.match_segment( ypath, depth ) end if match_deep match_deep.each { |m| deep_nodes.push( match_init + m ) } end } end depth += 1 seg = ypath.segments[ depth ] end match_nodes = case seg when "." [[nil, self]] when ".." [["..", nil]] when "*" if @value.is_a? Enumerable idx = -1 @value.collect { |h| idx += 1 if Hash === @value [h[0], h[1][1]] else [idx, h] end } end else if seg =~ /^"(.*)"$/ seg = $1 elsif seg =~ /^'(.*)'$/ seg = $1 end if ( v = at( seg ) ) [[ seg, v ]] end end return deep_nodes unless match_nodes pred = ypath.predicates[ depth ] if pred case pred when /^\.=/ pred = $' match_nodes.reject! { |n| n.last.value != pred } else match_nodes.reject! { |n| n.last.at( pred ).nil? } end end return match_nodes + deep_nodes unless ypath.segments.length > depth + 1 #puts "DEPTH: #{depth + 1}" deep_nodes = [] match_nodes.each { |n| if n[1].is_a? BaseNode match_deep = n[1].match_segment( ypath, depth + 1 ) if match_deep match_deep.each { |m| deep_nodes.push( n + m ) } end else deep_nodes = [] end } deep_nodes = nil if deep_nodes.length == 0 deep_nodes end # # We want the node to act like as Hash # if it is. 
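# (or like an Array, when the wrapped value is a sequence)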
# def []( *k ) if Hash === @value v = @value.[]( *k ) v[1] if v elsif Array === @value @value.[]( *k ) end end def children if Hash === @value @value.values.collect { |c| c[1] } elsif Array === @value @value end end def children_with_index if Hash === @value @value.keys.collect { |i| [self[i], i] } elsif Array === @value i = -1; @value.collect { |v| i += 1; [v, i] } end end def emit transform.to_yaml end end end ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/lib/yaml/error.rb0000644000000000000000000000205511672453175025033 0ustar rootroot# # Error messages and exception class # module YAML # # Error messages # ERROR_NO_HEADER_NODE = "With UseHeader=false, the node Array or Hash must have elements" ERROR_NEED_HEADER = "With UseHeader=false, the node must be an Array or Hash" ERROR_BAD_EXPLICIT = "Unsupported explicit transfer: '%s'" ERROR_MANY_EXPLICIT = "More than one explicit transfer" ERROR_MANY_IMPLICIT = "More than one implicit request" ERROR_NO_ANCHOR = "No anchor for alias '%s'" ERROR_BAD_ANCHOR = "Invalid anchor: %s" ERROR_MANY_ANCHOR = "More than one anchor" ERROR_ANCHOR_ALIAS = "Can't define both an anchor and an alias" ERROR_BAD_ALIAS = "Invalid alias: %s" ERROR_MANY_ALIAS = "More than one alias" ERROR_ZERO_INDENT = "Can't use zero as an indentation width" ERROR_UNSUPPORTED_VERSION = "This release of YAML.rb does not support YAML version %s" ERROR_UNSUPPORTED_ENCODING = "Attempt to use unsupported encoding: %s" # # YAML Error classes # class Error < StandardError; end class ParseError < Error; end class TypeError < StandardError; end end ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/lib/yaml/baseemitter.rb0000644000000000000000000001317611672453175026214 0ustar rootroot# # BaseEmitter # require 'yaml/constants' require 'yaml/encoding' require 'yaml/error' module YAML module BaseEmitter def options( opt = nil ) if opt @options[opt] || YAML::DEFAULTS[opt] else @options end end def options=( opt ) @options = opt end # # Emit binary data # def binary_base64( value ) self << "!binary " self.node_text( [value].pack("m"), '|' ) end # # Emit plain, normal flowing text # def node_text( value, block = nil ) @seq_map = false valx = value.dup unless block block = if options(:UseBlock) '|' elsif not options(:UseFold) and valx =~ /\n[ \t]/ and not valx =~ /#{YAML::ESCAPE_CHAR}/ '|' else '>' end indt = $&.to_i if block =~ /\d+/ if valx =~ /(\A\n*[ \t#]|^---\s+)/ indt = options(:Indent) unless indt.to_i > 0 block += indt.to_s end block += if valx =~ /\n\Z\n/ "+" elsif valx =~ /\Z\n/ "" else "-" end end block += "\n" if block[0] == ?" esc_skip = ( "\t\n" unless valx =~ /^[ \t]/ ) || "" valx = fold( YAML::escape( valx, esc_skip ) + "\"" ).chomp self << '"' + indent_text( valx, indt, false ) else if block[0] == ?> valx = fold( valx ) end #p [block, indt] self << block + indent_text( valx, indt ) end end # # Emit a simple, unqouted string # def simple( value ) @seq_map = false self << value.to_s end # # Emit double-quoted string # def double( value ) "\"#{YAML.escape( value )}\"" end # # Emit single-quoted string # def single( value ) "'#{value}'" end # # Write a text block with the current indent # def indent_text( text, mod, first_line = true ) return "" if text.to_s.empty? 
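# Prefix each line of the block with the current indentation; when
# first_line is false the very first line is left untouched because it
# continues a line that has already been indented.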
spacing = indent( mod ) text = text.gsub( /\A([^\n])/, "#{ spacing }\\1" ) if first_line return text.gsub( /\n^([^\n])/, "\n#{spacing}\\1" ) end # # Write a current indent # def indent( mod = nil ) #p [ self.id, level, mod, :INDENT ] if level <= 0 mod ||= 0 else mod ||= options(:Indent) mod += ( level - 1 ) * options(:Indent) end return " " * mod end # # Add indent to the buffer # def indent! self << indent end # # Folding paragraphs within a column # def fold( value ) value.gsub( /(^[ \t]+.*$)|(\S.{0,#{options(:BestWidth) - 1}})(?:[ \t]+|(\n+(?=[ \t]|\Z))|$)/ ) do |s| $1 || $2 + ( $3 || "\n" ) end end # # Quick mapping # def map( type, &e ) val = Mapping.new e.call( val ) self << "#{type} " if type.length.nonzero? # # Empty hashes # if val.length.zero? self << "{}" @seq_map = false else # FIXME # if @buffer.length == 1 and options(:UseHeader) == false and type.length.zero? # @headless = 1 # end defkey = @options.delete( :DefaultKey ) if defkey seq_map_shortcut self << "= : " defkey.to_yaml( :Emitter => self ) end # # Emit the key and value # val.each { |v| seq_map_shortcut if v[0].is_complex_yaml? self << "? " end v[0].to_yaml( :Emitter => self ) if v[0].is_complex_yaml? self << "\n" indent! end self << ": " v[1].to_yaml( :Emitter => self ) } end end def seq_map_shortcut # FIXME: seq_map needs to work with the new anchoring system # if @seq_map # @anchor_extras[@buffer.length - 1] = "\n" + indent # @seq_map = false # else self << "\n" indent! # end end # # Quick sequence # def seq( type, &e ) @seq_map = false val = Sequence.new e.call( val ) self << "#{type} " if type.length.nonzero? # # Empty arrays # if val.length.zero? self << "[]" else # FIXME # if @buffer.length == 1 and options(:UseHeader) == false and type.length.zero? # @headless = 1 # end # # Emit the key and value # val.each { |v| self << "\n" indent! self << "- " @seq_map = true if v.class == Hash v.to_yaml( :Emitter => self ) } end end end # # Emitter helper classes # class Mapping < Array def add( k, v ) push [k, v] end end class Sequence < Array def add( v ) push v end end end ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/lib/yaml/dbm.rb0000644000000000000000000000437211672453175024450 0ustar rootrootrequire 'yaml' require 'dbm' # # YAML + DBM = YDBM # - Same interface as DBM class # module YAML class DBM < ::DBM VERSION = "0.1" def []( key ) fetch( key ) end def []=( key, val ) store( key, val ) end def fetch( keystr, ifnone = nil ) begin val = super( keystr ) return YAML::load( val ) if String === val rescue IndexError end if block_given? yield keystr else ifnone end end def index( keystr ) super( keystr.to_yaml ) end def values_at( *keys ) keys.collect { |k| fetch( k ) } end def delete( key ) v = super( key ) if String === v v = YAML::load( v ) end v end def delete_if del_keys = keys.dup del_keys.delete_if { |k| yield( k, fetch( k ) ) == false } del_keys.each { |k| delete( k ) } self end def reject hsh = self.to_hash hsh.reject { |k,v| yield k, v } end def each_pair keys.each { |k| yield k, fetch( k ) } self end def each_value super { |v| yield YAML::load( v ) } self end def values super.collect { |v| YAML::load( v ) } end def has_value?( val ) each_value { |v| return true if v == val } return false end def invert h = {} keys.each { |k| h[ self.fetch( k ) ] = k } h end def replace( hsh ) clear update( hsh ) end def shift a = super a[1] = YAML::load( a[1] ) if a a end def select( *keys ) if block_given? 
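# With a block this behaves like Hash#select, returning the [key, value]
# pairs the block approves of; without a block it simply delegates to
# values_at for the given keys.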
self.keys.collect { |k| v = self[k]; [k, v] if yield k, v }.compact else values_at( *keys ) end end def store( key, val ) super( key, val.to_yaml ) val end def update( hsh ) hsh.keys.each do |k| self.store( k, hsh.fetch( k ) ) end self end def to_a a = [] keys.each { |k| a.push [ k, self.fetch( k ) ] } a end def to_hash h = {} keys.each { |k| h[ k ] = self.fetch( k ) } h end alias :each :each_pair end end ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/lib/yaml/ypath.rb0000644000000000000000000000246011672453175025027 0ustar rootroot# # YAML::YPath # module YAML class YPath attr_accessor :segments, :predicates, :flags def initialize( str ) @segments = [] @predicates = [] @flags = nil while str =~ /^\/?(\/|[^\/\[]+)(?:\[([^\]]+)\])?/ @segments.push $1 @predicates.push $2 str = $' end unless str.to_s.empty? @segments += str.split( "/" ) end if @segments.length == 0 @segments.push "." end end def YPath.each_path( str ) # # Find choices # paths = [] str = "(#{ str })" while str.sub!( /\(([^()]+)\)/, "\n#{ paths.length }\n" ) paths.push $1.split( '|' ) end # # Construct all possible paths # all = [ str ] ( paths.length - 1 ).downto( 0 ) do |i| all = all.collect do |a| paths[i].collect do |p| a.gsub( /\n#{ i }\n/, p ) end end.flatten.uniq end all.collect do |path| yield YPath.new( path ) end end end end ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/lib/okay/0000755000000000000000000000000011672453175023354 5ustar rootrootruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/lib/okay/rpc.rb0000644000000000000000000003433011672453175024470 0ustar rootroot# # The !okay/rpc module for YAML.rb # Specification at http://wiki.yaml.org/yamlwiki/OkayRpcProtocol # require 'okay' require 'net/http' module Okay module RPC VERSION = '0.06' class Method attr_accessor :methodName, :params def initialize( mn, p ) @methodName = mn @params = p end def to_yaml( opts = {} ) YAML::quick_emit( self.id, opts ) { |out| out.map( "!okay/rpc/method" ) { |map| map.add( self.methodName, self.params ) } } end end YAML.add_domain_type( "okay.yaml.org,2002", "rpc/method" ) { |type, val| if val.class == Hash and val.length == 1 val = val.to_a.first RPC::Method.new( val[0], val[1] ) end } class Fault attr_accessor :code, :message def initialize( c, m ) @code = c @message = m end def to_yaml( opts = {} ) YAML::quick_emit( self.id, opts ) { |out| out.map( "!okay/rpc/fault" ) { |map| map.add( self.code, self.message ) } } end end YAML.add_domain_type( "okay.yaml.org,2002", "rpc/fault" ) { |type, val| if val.class == Hash and val.length == 1 val = val.to_a.first Fault.new( val[0], val[1] ) end } class Client USER_AGENT = "Okay::RPC::Client (Ruby #{RUBY_VERSION})" def initialize(host=nil, path=nil, port=nil, proxy_host=nil, proxy_port=nil, user=nil, password=nil, use_ssl=nil, timeout=nil) @host = host || "localhost" @path = path || "/okayRpc/" @proxy_host = proxy_host @proxy_port = proxy_port @proxy_host ||= 'localhost' if @proxy_port != nil @proxy_port ||= 8080 if @proxy_host != nil @use_ssl = use_ssl || false @timeout = timeout || 30 @queue = YAML::Stream.new( :UseHeader => true ) if use_ssl require "net/https" @port = port || 443 else @port = port || 80 end @user, @password = user, password set_auth # convert ports to integers @port = @port.to_i if @port != nil @proxy_port = @proxy_port.to_i if @proxy_port != nil # HTTP object for synchronous calls Net::HTTP.version_1_2 @http = Net::HTTP.new(@host, @port, @proxy_host, @proxy_port) @http.use_ssl = @use_ssl if @use_ssl @http.read_timeout = @timeout 
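# A single Net::HTTP object is reused for synchronous calls so the
# connection can be kept alive; do_rpc builds a fresh, throwaway
# connection for asynchronous requests instead.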
@http.open_timeout = @timeout @parser = nil @create = nil end # Attribute Accessors ------------------------------------------------------------------- attr_reader :timeout, :user, :password def timeout=(new_timeout) @timeout = new_timeout @http.read_timeout = @timeout @http.open_timeout = @timeout end def user=(new_user) @user = new_user set_auth end def password=(new_password) @password = new_password set_auth end # Call methods -------------------------------------------------------------- def call(method, *args) meth = Method.new( method, args ) YAML::load( do_rpc(meth.to_yaml, false) ) end def qcall(method, *args) @queue.add( Method.new( method, args ) ) end def qrun ret = YAML.load_stream( do_rpc( @queue.emit, false ) ) @queue = YAML::Stream.new( :UseHeader => true ) ret end private # ---------------------------------------------------------- def set_auth if @user.nil? @auth = nil else a = "#@user" a << ":#@password" if @password != nil @auth = ("Basic " + [a].pack("m")).chomp end end def do_rpc(request, async=false) header = { "User-Agent" => USER_AGENT, "Content-Type" => "text/yaml", "Content-Length" => request.size.to_s, "Connection" => (async ? "close" : "keep-alive") } if @auth != nil # add authorization header header["Authorization"] = @auth end if async # use a new HTTP object for each call Net::HTTP.version_1_2 http = Net::HTTP.new(@host, @port, @proxy_host, @proxy_port) http.use_ssl = @use_ssl if @use_ssl http.read_timeout = @timeout http.open_timeout = @timeout else # reuse the HTTP object for each call => connection alive is possible http = @http end # post request resp = http.post2(@path, request, header) data = resp.body http.finish if async if resp.code == "401" # Authorization Required raise "Authorization failed.\nHTTP-Error: #{resp.code} #{resp.message}" elsif resp.code[0,1] != "2" raise "HTTP-Error: #{resp.code} #{resp.message}" end if resp["Content-Type"] != "text/yaml" raise "Wrong content-type [#{resp['Content-Type']}]: \n#{data}" end expected = resp["Content-Length"] || "" if data.nil? or data.size == 0 raise "Wrong size. Was #{data.size}, should be #{expected}" elsif expected.to_i != data.size and resp["Transfer-Encoding"].nil? raise "Wrong size. Was #{data.size}, should be #{expected}" end return data end end class BasicServer def initialize @handlers = {} @aboutHash = { 'name' => 'An !okay/rpc server.', 'uri' => nil, 'version' => VERSION, 'authors' => nil, 'about' => "Welcome to the !okay/rpc server." } add_introspection end def name( str ) @aboutHash['name'] end def name=( str ) @aboutHash['name'] = str end def about @aboutHash['about'] end def about=( str ) @aboutHash['about'] = str.strip.gsub( /^ +/, '' ).gsub( /\n(\n*)/, ' \1' ) end def uri( str ) @aboutHash['uri'] end def uri=( str ) @aboutHash['uri'] = str end def add_author( name, email, url ) @aboutHash['authors'] ||= [] @aboutHash['authors'] << { 'name' => name, 'email' => email, 'url' => url } end def add_handler( methodName, sig, doc, &block ) @handlers[ methodName ] = { :signature => sig, :help => doc, :block => block } end def get_handler( methodName, prop ) unless @handlers.has_key?( methodName ) Fault.new( 101, "No method ''#{methodName}'' available." ) else @handlers[ methodName ][ prop ] end end def dispatch( meth ) b = get_handler( meth.methodName, :block ) return b if b.is_a? 
Fault b.call( meth ) end def process( meth_yml ) s = YAML::Stream.new( :UseHeader => true ) YAML::load_documents( meth_yml ) { |doc| s.add( dispatch( doc ) ) } s.emit end private def add_introspection add_handler( "system.about", %w(map), "Provides a short description of this !okay/rpc server's intent." ) { @aboutHash } add_handler( "system.getCapabilities", %w(map), "Describes this server's capabilities, including version of " + "!okay/rpc available and YAML implementation." ) { { 'okay/rpc' => { 'version' => Okay::RPC::VERSION, 'id' => 'YAML.rb Okay::RPC', 'url' => 'http://wiki.yaml.org/yamlwiki/OkayRpcProtocol' }, 'yaml' => { 'version' => YAML::VERSION, 'id' => 'YAML.rb', 'url' => 'http://yaml4r.sf.net/' }, 'sys' => { 'version' => Kernel::RUBY_VERSION, 'id' => Kernel::RUBY_PLATFORM } } } add_handler( "system.listMethods", %w(seq), "Lists the available methods for this !okay/rpc server." ) { @handlers.keys.sort } add_handler( "system.methodSignature", %w(seq str), "Returns a method signature." ) { |meth| get_handler( meth.params[0], :signature ) } add_handler( "system.methodHelp", %w(str str), "Returns help on using this method." ) { |meth| get_handler( meth.params[0], :help ) } add_handler( "system.methodBlank", %w(str str), "Returns a blank method." ) { |meth| sig = get_handler( meth.params[0], :signature ) unless sig.is_a? Fault "--- !okay/rpc/method\n#{meth.params[0]}:\n" + sig[1..-1].collect { |type| " - !#{type}" }.join( "\n" ) else sig end } end end class ModRubyServer < BasicServer def initialize(*a) @ap = Apache::request super(*a) url ||= @ap.uri end def serve catch(:exit_serve) { header = {} hdr_in_proc = proc {|key, value| header[key.capitalize] = value} if @ap.respond_to? :headers_in @ap.headers_in.each( &hdr_in_proc ) else @ap.each_header( &hdr_in_proc ) end length = header['Content-length'].to_i http_error(405, "Method Not Allowed") unless @ap.request_method == "POST" http_error(400, "Bad Request") unless header['Content-type'] == "text/yaml" http_error(411, "Length Required") unless length > 0 # TODO: do we need a call to binmode? @ap.binmode data = @ap.read(length) http_error(400, "Bad Request") if data.nil? or data.size != length resp = begin process(data) rescue Exception => e Fault.new(101, e.message).to_yaml end http_write(resp, 200, "Content-type" => "text/yaml") } end private def http_error(status, message) err = "#{status} #{message}" msg = <<-"MSGEND" #{err}

#{err}

Unexpected error occurred while processing !okay/rpc request!

MSGEND http_write(msg, status, "Status" => err, "Content-type" => "text/html") throw :exit_serve # exit from the #serve method end def http_write(body, status, header) h = {} header.each {|key, value| h[key.to_s.capitalize] = value} h['Status'] ||= "200 OK" h['Content-length'] ||= body.size.to_s h.each do |key, value| if @ap.respond_to? :headers_out @ap.headers_out[key] = value else @ap[key] = value end end @ap.content_type = h["Content-type"] @ap.status = status.to_i @ap.send_http_header @ap.print body end end end end Okay.load_schema( < XML-RPC for YAML. examples: > Testing schema: - map: /*: [ seq ] length: 1 EOY ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/lib/okay/news.rb0000644000000000000000000000265011672453175024660 0ustar rootroot# # The !okay/news module for YAML.rb # require 'okay' module Okay class News < ModuleBase attr_accessor :title, :link, :description, :updatePeriod, :items def to_yaml_properties [ '@title', '@link', '@description', '@updatePeriod', '@items' ] end def to_yaml_type "!okay/news" end end Okay.add_type( "news" ) { |type, val, modules| Okay.object_maker( Okay::News, val, modules ) } class NewsItem < ModuleBase attr_accessor :title, :link, :description, :pubTime def to_yaml_properties [ '@title', '@link', '@description', '@pubTime' ] end def to_yaml_type "!okay/news/item" end end Okay.add_type( "news/item" ) { |type, val, modules| Okay.object_maker( Okay::NewsItem, val, modules ) } end Okay.load_schema( < Inspired by RSS, more limited... examples: > If I had a news site... schema: - map: /title: [ str ] /link: [ str ] /description: [ str ] /updatePeriod: [ str ] /items: - seq: { /*: [ okay/news/item ] } okay/news/item: description: > Inside okay/news lies... examples: > See okay/news examples... schema: - map: /title: [ str ] /pubTime: [ time ] /link: [ str ] /description: [ str ] optional: [ /title ] EOY ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/lib/yaml.rb0000644000000000000000000003035711672453175023710 0ustar rootroot# -*- mode: ruby; ruby-indent-level: 4; tab-width: 4 -*- vim: sw=4 ts=4 # $Id: yaml.rb,v 1.30 2005/04/07 16:22:02 why Exp $ # # = yaml.rb: top-level module with methods for loading and parsing YAML documents # # Author:: why the lucky stiff # require 'stringio' require 'yaml/compat' require 'yaml/error' require 'yaml/syck' require 'yaml/tag' require 'yaml/stream' # == YAML # # YAML(tm) (rhymes with 'camel') is a # straightforward machine parsable data serialization format designed for # human readability and interaction with scripting languages such as Perl # and Python. YAML is optimized for data serialization, formatted # dumping, configuration files, log files, Internet messaging and # filtering. This specification describes the YAML information model and # serialization format. Together with the Unicode standard for characters, it # provides all the information necessary to understand YAML Version 1.0 # and construct computer programs to process it. # # See http://yaml.org/ for more information. For a quick tutorial, please # visit YAML In Five Minutes (http://yaml.kwiki.org/?YamlInFiveMinutes). # # == About This Library # # The YAML 1.0 specification outlines four stages of YAML loading and dumping. # This library honors all four of those stages, although data is really only # available to you in three stages. # # The four stages are: native, representation, serialization, and presentation. # # The native stage refers to data which has been loaded completely into Ruby's # own types. (See +YAML::load+.) 
# # The representation stage means data which has been composed into # +YAML::BaseNode+ objects. In this stage, the document is available as a # tree of node objects. You can perform YPath queries and transformations # at this level. (See +YAML::parse+.) # # The serialization stage happens inside the parser. The YAML parser used in # Ruby is called Syck. Serialized nodes are available in the extension as # SyckNode structs. # # The presentation stage is the YAML document itself. This is accessible # to you as a string. (See +YAML::dump+.) # # For more information about the various information models, see Chapter # 3 of the YAML 1.0 Specification (http://yaml.org/spec/#id2491269). # # The YAML module provides quick access to the most common loading (YAML::load) # and dumping (YAML::dump) tasks. This module also provides an API for registering # global types (YAML::add_domain_type). # # == Example # # A simple round-trip (load and dump) of an object. # # require "yaml" # # test_obj = ["dogs", "cats", "badgers"] # # yaml_obj = YAML::dump( test_obj ) # # -> --- # - dogs # - cats # - badgers # ruby_obj = YAML::load( yaml_obj ) # # => ["dogs", "cats", "badgers"] # ruby_obj == test_obj # # => true # # To register your custom types with the global resolver, use +add_domain_type+. # # YAML::add_domain_type( "your-site.com,2004", "widget" ) do |type, val| # Widget.new( val ) # end # module YAML Resolver = YAML::Syck::Resolver DefaultResolver = YAML::Syck::DefaultResolver DefaultResolver.use_types_at( @@tagged_classes ) GenericResolver = YAML::Syck::GenericResolver Parser = YAML::Syck::Parser Emitter = YAML::Syck::Emitter # Returns a new default parser def YAML.parser; Parser.new.set_resolver( YAML.resolver ); end # Returns a new generic parser def YAML.generic_parser; Parser.new.set_resolver( GenericResolver ); end # Returns the default resolver def YAML.resolver; DefaultResolver; end # Returns a new default emitter def YAML.emitter; Emitter.new.set_resolver( YAML.resolver ); end # # Converts _obj_ to YAML and writes the YAML result to _io_. # # File.open( 'animals.yaml', 'w' ) do |out| # YAML.dump( ['badger', 'elephant', 'tiger'], out ) # end # # If no _io_ is provided, a string containing the dumped YAML # is returned. # # YAML.dump( :locked ) # #=> "--- :locked" # def YAML.dump( obj, io = nil ) obj.to_yaml( io || io2 = StringIO.new ) io || ( io2.rewind; io2.read ) end # # Load a document from the current _io_ stream. # # File.open( 'animals.yaml' ) { |yf| YAML::load( yf ) } # #=> ['badger', 'elephant', 'tiger'] # # Can also load from a string. # # YAML.load( "--- :locked" ) # #=> :locked # def YAML.load( io ) yp = parser.load( io ) end # # Load a document from the file located at _filepath_. # # YAML.load_file( 'animals.yaml' ) # #=> ['badger', 'elephant', 'tiger'] # def YAML.load_file( filepath ) File.open( filepath ) do |f| load( f ) end end # # Parse the first document from the current _io_ stream # # File.open( 'animals.yaml' ) { |yf| YAML::load( yf ) } # #=> #, # #, # #]> # # Can also load from a string. # # YAML.parse( "--- :locked" ) # #=> # # def YAML.parse( io ) yp = generic_parser.load( io ) end # # Parse a document from the file located at _filepath_. # # YAML.parse_file( 'animals.yaml' ) # #=> #, # #, # #]> # def YAML.parse_file( filepath ) File.open( filepath ) do |f| parse( f ) end end # # Calls _block_ with each consecutive document in the YAML # stream contained in _io_. 
# # File.open( 'many-docs.yaml' ) do |yf| # YAML.each_document( yf ) do |ydoc| # ## ydoc contains the single object # ## from the YAML document # end # end # def YAML.each_document( io, &block ) yp = parser.load_documents( io, &block ) end # # Calls _block_ with each consecutive document in the YAML # stream contained in _io_. # # File.open( 'many-docs.yaml' ) do |yf| # YAML.load_documents( yf ) do |ydoc| # ## ydoc contains the single object # ## from the YAML document # end # end # def YAML.load_documents( io, &doc_proc ) YAML.each_document( io, &doc_proc ) end # # Calls _block_ with a tree of +YAML::BaseNodes+, one tree for # each consecutive document in the YAML stream contained in _io_. # # File.open( 'many-docs.yaml' ) do |yf| # YAML.each_node( yf ) do |ydoc| # ## ydoc contains a tree of nodes # ## from the YAML document # end # end # def YAML.each_node( io, &doc_proc ) yp = generic_parser.load_documents( io, &doc_proc ) end # # Calls _block_ with a tree of +YAML::BaseNodes+, one tree for # each consecutive document in the YAML stream contained in _io_. # # File.open( 'many-docs.yaml' ) do |yf| # YAML.parse_documents( yf ) do |ydoc| # ## ydoc contains a tree of nodes # ## from the YAML document # end # end # def YAML.parse_documents( io, &doc_proc ) YAML.each_node( io, &doc_proc ) end # # Loads all documents from the current _io_ stream, # returning a +YAML::Stream+ object containing all # loaded documents. # def YAML.load_stream( io ) d = nil parser.load_documents( io ) do |doc| d = YAML::Stream.new if not d d.add( doc ) end return d end # # Returns a YAML stream containing each of the items in +objs+, # each having their own document. # # YAML.dump_stream( 0, [], {} ) # #=> --- 0 # --- [] # --- {} # def YAML.dump_stream( *objs ) d = YAML::Stream.new objs.each do |doc| d.add( doc ) end d.emit end # # Add a global handler for a YAML domain type. 
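#
# A brief usage sketch (illustrative only -- the domain string and the
# +Widget+ class below are placeholders, not part of this library):
#
#   YAML.add_domain_type( "example.com,2006", "widget" ) do |type, val|
#     # +type+ is the expanded tag URI ("tag:example.com,2006:widget"),
#     # +val+ is the data loaded for the tagged node; return whatever
#     # Ruby object should stand in for it.
#     Widget.new( val )
#   end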
# def YAML.add_domain_type( domain, type_tag, &transfer_proc ) resolver.add_type( "tag:#{ domain }:#{ type_tag }", transfer_proc ) end # # Add a transfer method for a builtin type # def YAML.add_builtin_type( type_tag, &transfer_proc ) resolver.add_type( "tag:yaml.org,2002:#{ type_tag }", transfer_proc ) end # # Add a transfer method for a builtin type # def YAML.add_ruby_type( type, &transfer_proc ) resolver.add_type( "tag:ruby.yaml.org,2002:#{ type_tag }", transfer_proc ) end # # Add a private document type # def YAML.add_private_type( type_re, &transfer_proc ) resolver.add_type( "x-private:" + type_re, transfer_proc ) end # # Detect typing of a string # def YAML.detect_implicit( val ) resolver.detect_implicit( val ) end # # Convert a type_id to a taguri # def YAML.tagurize( val ) resolver.tagurize( val ) end # # Apply a transfer method to a Ruby object # def YAML.transfer( type_id, obj ) resolver.transfer( YAML.tagurize( type_id ), obj ) end # # Apply any implicit a node may qualify for # def YAML.try_implicit( obj ) YAML.transfer( YAML.detect_implicit( obj ), obj ) end # # Method to extract colon-seperated type and class, returning # the type and the constant of the class # def YAML.read_type_class( type, obj_class ) scheme, domain, type, tclass = type.split( ':', 4 ) tclass.split( "::" ).each { |c| obj_class = obj_class.const_get( c ) } if tclass return [ type, obj_class ] end # # Allocate blank object # def YAML.object_maker( obj_class, val ) if Hash === val o = obj_class.allocate val.each_pair { |k,v| o.instance_variable_set("@#{k}", v) } o else raise YAML::Error, "Invalid object explicitly tagged !ruby/Object: " + val.inspect end end # # Allocate an Emitter if needed # def YAML.quick_emit( oid, opts = {}, &e ) out = if opts.is_a? YAML::Emitter opts else emitter.reset( opts ) end out.emit( oid, &e ) end end require 'yaml/rubytypes' require 'yaml/types' module Kernel # :nodoc: # # ryan:: You know how Kernel.p is a really convenient way to dump ruby # structures? The only downside is that it's not as legible as # YAML. # # _why:: (listening) # # ryan:: I know you don't want to urinate all over your users' namespaces. # But, on the other hand, convenience of dumping for debugging is, # IMO, a big YAML use case. # # _why:: Go nuts! Have a pony parade! # # ryan:: Either way, I certainly will have a pony parade. # # Prints any supplied _objects_ out in YAML. Intended as # a variation on +Kernel::p+. # # S = Struct.new(:name, :state) # s = S['dave', 'TX'] # y s # # _produces:_ # # --- !ruby/struct:S # name: dave # state: TX # def y( object, *objects ) objects.unshift object puts( if objects.length == 1 YAML::dump( *objects ) else YAML::dump_stream( *objects ) end ) end private :y end ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/ext/ruby/lib/yod.rb0000644000000000000000000007573211672453175023547 0ustar rootroot# vim:sw=4:ts=4 # $Id: yod.rb,v 1.1 2003/03/20 13:25:38 whythluckystiff Exp $ # # YOD: Yaml Ok Documentation # require 'yaml' # # Hi. Yod is... well... something. Okay, look. # It's a personal project for building documentation. # It's also meant to be an example project. To give # a more complete example of YAML's use. 
# module Yod VERSION = '0.2' SECTION_GIF =< 2, :y => 0.0, :title => title, :sections => [] } end def pdf.textPara( fontName, fontSize, fontAlign, indent, ybump, textchunk, force_page = false ) if force_page @guts[:y] = 0.0 end begint = false textchunk.split( /\n/ ).each do |text| j = 0 j_p = 0 line_width = ( 6.6 - ( indent * 2.0 ) ) * ClibPDF::INCH self.setFont( fontName, "MacRomanEncoding", fontSize ) while j = text.index( /\s/, j ) or text.length.nonzero? unless j j = -1 j_p = -1 end if j < 0 or self.stringWidth( text[ 0..j-1 ] ) > line_width if @guts[:y] < 0.6 # Start a new page if begint self.endText begint = false end @guts[:page] += 1 @guts[:y] = 10.2 self.pageInit( @guts[:page], ClibPDF::PORTRAIT, ClibPDF::LETTER, ClibPDF::LETTER) self.beginText( 0 ) self.setFont( "NewCenturySchlbk-Roman", "MacRomanEncoding", 10.0) self.text(1.0, 0.2, 0.0, @guts[:title]) self.textAligned( 7.8, 0.2, 0.0, ClibPDF::TEXTPOS_MR, "Page #{@guts[:page]}" ) self.endText self.setgray(0.0) self.setlinewidth(0.2) self.moveto(1.0, 0.4) self.lineto(7.8, 0.4) self.stroke end # # Set the font # unless begint self.beginText( 0 ) self.setFont( fontName, "MacRomanEncoding", fontSize ) begint = true end line = text[ 0..j_p ] # self.text( 1.0 + indent, @guts[:y], 0.0, line ) align = ClibPDF::TEXTPOS_ML origin = 1.0 + indent if fontAlign == RIGHT align = ClibPDF::TEXTPOS_MR origin = 7.8 - indent elsif fontAlign == CENTER align = ClibPDF::TEXTPOS_MM origin = 4.4 end self.textAligned( origin, @guts[:y], 0.0, align, line ) if j_p > 0 text = text[ j_p + 2..-1 ] else text = "" end j = 0 j_p = 0 @guts[:y] -= ybump next end j_p = j - 1 j += 1 end end self.newline if begint self.endText end end def pdf.newline @guts[:y] -= 0.15 end pdf.initPara( @title ) @table_of_contents.each_with_index { |x, i| @contents[x].to_pdf( :Page, pdf, [ i + 1 ] ) } pdf.finalizeAll pdf.savePDFmemoryStreamToFile( output ) pdf.close() when :TableOfContents pdf = args.shift @table_of_contents.each_with_index { |x, i| @contents[x].to_pdf( :TableOfContents, pdf, i + 1 ) } end end def to_chm( *args ) page = args.shift case page when :MakeAll output, prefix = args Yod.multi_mkdir( output, 0755 ) # Write the INI File.open( File.join( output, "#{prefix}.hhp" ), "w" ) { |f| f.write( self.to_chm( :IniFile, prefix ) ) } # Write the bullet GIF File.open( File.join( output, "section.gif" ), "w" ) { |f| f.write( Yod::SECTION_GIF.unpack( "m*" )[0] ) } # Write the CSS File.open( File.join( output, "global.css" ), "w" ) { |f| f.write( Yod::TEXT_CSS ) } # Write the Index File.open( File.join( output, "Index.hhk" ), "w" ) { |f| f.write( self.to_chm( :Index ) ) } # Write the TOC File.open( File.join( output, "Table of Contents.hhc" ), "w" ) { |f| f.write( self.to_chm( :TableOfContents ) ) } # Write each page of the manual self.pages.each { |p| Yod.multi_mkdir( output + File::SEPARATOR + p.html_file_name.split( File::SEPARATOR )[0], 0755 ) File.open( File.join( output, p.html_file_name ), "w" ) { |f| f.write( p.to_chm( :Html ) ) } } when :IniFile prefix = args.shift page_list = self.pages.collect { |p| p.html_file_name } return < EOF } return <
    #{index_content}
EOF when :TableOfContents return <
    #{@table_of_contents.collect { |x| @contents[x].to_chm( :TableOfContents ) }}
EOF end end def to_html( *args ) page = args.shift case page when :MakeAll output = args.shift Yod.multi_mkdir( output, 0755 ) # Write the bullet GIF File.open( File.join( output, "section.gif" ), "w" ) { |f| f.write( Yod::SECTION_GIF.unpack( "m*" )[0] ) } # Write the CSS File.open( File.join( output, "global.css" ), "w" ) { |f| f.write( Yod::TEXT_CSS ) } # Write the Index frame File.open( File.join( output, "index.html" ), "w" ) { |f| f.write( self.to_html( :Frame ) ) } # Write the TOC File.open( File.join( output, "contents.html" ), "w" ) { |f| f.write( self.to_html( :TableOfContents ) ) } # Write each page of the manual self.pages.each { |p| Yod.multi_mkdir( output + File::SEPARATOR + p.html_file_name.split( File::SEPARATOR )[0], 0755 ) File.open( File.join( output, p.html_file_name ), "w" ) { |f| f.write( p.to_html( :Html ) ) } } when :Frame prefix = args.shift page_list = self.pages.collect { |p| p.html_file_name } return < #{@title} EOF when :TableOfContents return <
CONTENTS

#{@table_of_contents.collect { |x| @contents[x].to_html( :TableOfContents ) }}

EOF end end end class DocElement; attr_accessor :title, :elements; def pages; self; end; end class Group < DocElement def initialize( data ) @elements = [] data.each { |ele| name, ele = ele.to_a[0] if ele.is_a?( Yod::DocElement ) ele.title = name @elements << ele else raise Yod::Error, "Invalid node of type #{ele.class} in Group: " + ele.inspect end } end def pages self.elements.collect { |e| e.pages } end def index index_all = {} self.elements.each { |e| index_all.update( e.index ) if e.respond_to? :index } index_all end def to_man( *args ) page = args.shift case page when :Page depth = args.shift if depth.zero? str = ".SH #{self.title}\n" else str = ".Sh #{self.title}\n" end self.elements.each { |e| str += e.to_man( :Page, depth + 1 ) } str end end def to_pdf( *args ) page = args.shift case page when :TableOfContents pdf, idx = args pdf.textCRLFshow( "#{idx}. #{self.title}" ) self.elements.each_with_index { |e, i| e.to_pdf( :TableOfContents, pdf, "#{idx}.#{i+1}" ) } when :Page pdf, idx = args size = 22.0 - ( idx.length * 2.0 ) if size < 14.0 size = 14.0 end pdf.newline pdf.textPara( "NewCenturySchlbk-Roman", size, LEFT, 0.0, 0.4, "#{idx.join('.')}. #{self.title}", idx.length == 1 ) self.elements.each_with_index { |e, i| e.to_pdf( :Page, pdf, [ idx ] + [ i + 1 ] ) } end end def to_chm( page ) case page when :TableOfContents return <
    #{self.elements.collect { |e| e.to_chm( :TableOfContents ) }}
EOF end end def to_html( page ) case page when :TableOfContents return < #{self.title}
    #{self.elements.collect { |e| e.to_html( :TableOfContents ) }}
EOF end end end class Page < DocElement def initialize( data ) @elements = [] data.each { |ele| if ele.is_a?( Yod::PageElement ) @elements << ele else raise Yod::Error, "Invalid node of type #{ele.class} in Page: " + ele.inspect end } end def html_file_name "page" + File::SEPARATOR + @title.downcase.gsub( /[^A-Za-z0-9]/, '_' ) + ".htm" end def index index_all = { @title => self.html_file_name } end def to_man( *args ) page = args.shift case page when :Page depth = args.shift if depth.zero? str = ".SH #{self.title}\n" else str = ".Sh #{self.title}\n" end self.elements.collect { |e| str += e.to_man } str end end def to_pdf( *args ) page = args.shift case page when :TableOfContents pdf, idx = args pdf.textCRLFshow( "#{idx}. #{self.title}" ) when :Page pdf, idx = args size = 22.0 - ( idx.length * 2.0 ) if size < 14.0 size = 14.0 end pdf.newline pdf.textPara( "NewCenturySchlbk-Roman", size, LEFT, 0.0, 0.4, "#{idx.join('.')}. #{self.title}", idx.length == 1 ) self.elements.collect { |e| e.to_pdf( pdf ) } end end def to_chm( page ) case page when :TableOfContents return < EOF when :Html return self.to_html( :Html ) end end def to_html( page ) case page when :TableOfContents return < #{@title} EOF when :Html return < #{@title}
#{@title}
#{self.elements.collect { |e| e.to_html( :Html ) }} EOF end end end class CodePage < Page def initialize( data ) super( data ) ctr = 0 @elements.each { |e| e.text['no'] = ( ctr += 1 ) if Yod::Code === e } end end class ClassDef < Group attr_accessor :const_name def title=( t ) @title = t @const_name = t @elements.each { |e| e.add_const_namespace( self.const_namespace ) } end def add_const_namespace( ns ) self.const_name = ns + self.const_name self.elements.each { |e| e.add_const_namespace( ns ) } end def const_namespace "#{@title}#" end def title "#{@const_name} Class" end end class ModuleDef < ClassDef def const_namespace "#{@title}::" end def title "#{@const_name} Module" end end class Method < DocElement attr_accessor :brief, :since, :arguments, :block, :details, :returns, :const_name def initialize( data ) if Hash === data [:brief, :since, :arguments, :block, :returns, :details].each { |a| self.send( "#{a.id2name}=", data[a.id2name] ) } else raise Yod::Error, "ClassMethod must be a Hash." end end def add_const_namespace( ns ) self.const_name = ns + self.const_name end def html_file_name "class" + File::SEPARATOR + self.title.downcase.gsub( /\W+/, '_' ) + ".htm" end def title=( t ) @title = t @const_name = t end def title "#{@const_name} Method" end def index index_all = { self.title => self.html_file_name } end def method_example case self.const_name when /^(.+)#(\w+)$/ meth = $2 classObj = $1.gsub( /[A-Z]+/ ) { |s| s[0..0] + s[1..-1].downcase }.gsub( /::/, '' ) "a#{classObj}.#{meth}" else self.const_name end end def to_man( *args ) page = args.shift case page when :Page depth = args.shift if depth.zero? str = ".SH #{self.title}\n" else str = ".Sh #{self.title}\n" end str end end def to_pdf( *args ) page = args.shift case page when :TableOfContents pdf, idx = args pdf.textCRLFshow( "#{idx}. #{self.title}" ) when :Page pdf, idx = args txt = {} if Array === @arguments txt['args'] = "\n" + @arguments.collect { |p| if Array === p['type'] " (" + p['type'].join( " or " ) + ") #{p['name']}" else " (#{p['type']}) #{p['name']}" end }.join( ",\n" ) + "\n" end pdf.newline pdf.textPara( "NewCenturySchlbk-Roman", 16.0, LEFT, 0.0, 0.4, "#{idx.join('.')}. 
#{self.title}" ) pdf.textPara( "NewCenturySchlbk-Roman", 10.0, LEFT, 0.0, 0.2, @brief ) pdf.textPara( "Courier-Bold", 12.0, LEFT, 0.3, 0.2, "#{self.method_example}(#{txt['args']})" ) pdf.newline pdf.textPara( "NewCenturySchlbk-Roman", 14.0, LEFT, 0.0, 0.2, "Parameters" ) if Array === @arguments @arguments.each { |p| pdf.textPara( "NewCenturySchlbk-Italic", 10.0, LEFT, 0.2, 0.1, p['name'] ) pdf.textPara( "NewCenturySchlbk-Roman", 10.0, LEFT, 0.4, 0.2, p['brief'] ) } else pdf.textPara( "NewCenturySchlbk-Roman", 10.0, LEFT, 0.2, 0.2, "None" ) end if Array === @block pdf.newline pdf.textPara( "NewCenturySchlbk-Roman", 14.0, LEFT, 0.0, 0.2, "Block Parameters" ) @block.collect { |p| pdf.textPara( "NewCenturySchlbk-Italic", 10.0, LEFT, 0.2, 0.1, p['name'] ) pdf.textPara( "NewCenturySchlbk-Roman", 10.0, LEFT, 0.4, 0.2, p['brief'] ) } end pdf.newline pdf.textPara( "NewCenturySchlbk-Roman", 14.0, LEFT, 0.0, 0.2, "Returns" ) pdf.textPara( "NewCenturySchlbk-Roman", 10.0, LEFT, 0.4, 0.2, @returns || "None" ) if Array === @details pdf.newline pdf.textPara( "NewCenturySchlbk-Roman", 14.0, LEFT, 0.0, 0.2, "Details" ) @details.each { |e| e.to_pdf( pdf ) } end end end def to_chm( page ) case page when :TableOfContents return < EOF when :Html return self.to_html( :Html ) end end def to_html( page ) case page when :TableOfContents return < #{self.title} EOF when :Html ht = {} if Array === @details ht['remarks'] = <
Details
#{@details.collect { |e| e.to_html( :Html ) }}

EOF end if Array === @arguments ht['args'] = "\n" + @arguments.collect { |p| if Array === p['type'] " (" + p['type'].join( " or " ) + ") #{p['name']}" else " #{p['type']} #{p['name']}" end }.join( ",\n" ) + "\n" ht['args2'] = @arguments.collect { |p| <#{p['name']}
#{Yod.escapeHTML( p['brief'] )}
EOF }.join( "\n" ) else ht['args2'] = "
None
" end if Array === @block ht['blockvars'] = @block.collect { |p| <#{p['name']}
#{Yod.escapeHTML( p['brief'] )}
EOF }.join( "\n" ) ht['blockvars'] = <
Block Parameters
#{ht['blockvars']}
EOF end return < #{self.title}
  #{self.title}
#{@title}
#{Yod.escapeHTML( @brief )}

#{self.method_example}(#{ht['args']})

Parameters
#{ht['args2']}
#{ht['blockvars']}
Return Values
#{@returns || "None"}

#{ht['remarks']} EOF end end end class PageElement attr_accessor :text def initialize( text ) @text = text end end class Paragraph < PageElement def to_man "#{text}\n.LP\n" end def to_pdf( pdf ) pdf.textPara( "NewCenturySchlbk-Roman", 10.0, LEFT, 0.0, 0.2, "#{text}" ) end def to_html( page ) case page when :Html return < #{Yod.escapeHTML( text )}

EOF end end end class Code < PageElement def to_man "#{text['code']}\n.LP\n" end def to_pdf( pdf ) pdf.textPara( "CPDF-Monospace", 12.0, LEFT, 0.3, 0.2, text['code'] ) pdf.textPara( "NewCenturySchlbk-Italic", 10.0, CENTER, 0.3, 0.2, "Ex. #{text['no']}: #{Yod.escapeHTML( text['name'] )}" ) end def to_html( page ) case page when :Html return <#{Yod.escapeHTML( text['code'] )}
Ex. #{text['no']}: #{Yod.escapeHTML( text['name'] )}
EOF end end end class Quote < PageElement def to_man ".IP\n#{text}\n" end def to_pdf( pdf ) pdf.textPara( "NewCenturySchlbk-Italic", 10.0, LEFT, 0.3, 0.2, "#{text}" ) end def to_html( page ) case page when :Html return < #{Yod.escapeHTML( text )} EOF end end end class Title < PageElement def to_man ".Sh #{text}\n" end def to_pdf( pdf ) pdf.newline pdf.textPara( "NewCenturySchlbk-Roman", 14.0, LEFT, 0.0, 0.2, "#{text}" ) end def to_html( page ) case page when :Html return <#{Yod.escapeHTML( text )} EOF end end end YAML.add_domain_type( "yaml4r.sf.net,2003", /^yod\// ) { |type, val| type =~ /^yod\/(\w+)(?::?(\w+))?/ if Yod.const_defined?( $1 ) if $2 Yod.const_get( $1 ).new( $2, val ) else Yod.const_get( $1 ).new( val ) end else raise Yod::Error, "Invalid type #{type} not available in Yod module." end } def Yod.load( io ) YAML::load( io ) end # based on code by WATANABE Tetsuya def Yod.multi_mkdir( mpath, mask ) path = '' mpath.split( File::SEPARATOR ).each do |f| path.concat( f ) Dir.mkdir( path, mask ) unless path == '' || File.exist?( path ) path.concat( File::SEPARATOR ) end end def Yod.escapeHTML( string ) string.to_s.gsub(/&/n, '&').gsub(/\"/n, '"').gsub(/>/n, '>').gsub(/ nil, 'str' => nil, 'int' => nil, 'null' => nil, 'float' => nil, 'seq' => nil, 'map' => nil } class Error < StandardError; end # Transfer methods for okay types TRANSFER_TYPES = {} OKAY_TYPE_DOMAIN = 'okay.yaml.org,2002' OKAY_TYPE_REGEXP = /^tag:#{ Regexp::quote( OKAY_TYPE_DOMAIN ) }:([^;]+)((?:;\w+)*)$/ # # Base class for Okay types # All types should inherit this class to use modules # class ModuleBase attr_accessor :modules end # # Quick Okay type handler, handles modules # def Okay.add_type( type_name, &transfer_proc ) type_re = /^(#{Regexp::quote( type_name )})((?:;\w+)*)$/ Okay::TRANSFER_TYPES[ type_name ] = transfer_proc YAML.add_domain_type( OKAY_TYPE_DOMAIN, type_re ) { |type, val| type, mods = OKAY_TYPE_REGEXP.match( type ).to_a[1,2] unless mods.to_s.empty? mod_re = /^(#{ mods.slice( 1..-1 ).gsub( /;/, '|' ) });/ modules = {} val.reject! { |k, v| if k =~ mod_re modules[ $1 ] ||= {} modules[ $1 ][ $' ] = v true else false end } end Okay::TRANSFER_TYPES[ type ].call( type, val, modules ) } end def Okay.object_maker( obj_class, val, modules ) obj = YAML.object_maker( obj_class, val ) unless obj.respond_to?( :modules= ) raise Okay::Error, "Modules #{modules.keys.join( ',' )} can't be handled by class #{obj_class}" if modules end obj.modules = modules obj end def Okay.load_schema( schema ) schema = YAML::load( schema ) schema.each { |k,v| @@type_registry[k] = v['schema'] } end def Okay.schema( type_name ) @@type_registry[type_name] end def Okay.validate_node( node ) type, mods = OKAY_TYPE_REGEXP.match( node.type_id ).to_a[1,2] unless @@type_registry.has_key?( type ) raise Okay::Error, "Type `#{type}' not found in loaded schema." end return true if @@type_registry[ type ].nil? node_vs_schema( @@type_registry[ type ], node, [], true ) end def Okay.node_vs_schema( schema, node, depth, head ) # # Head type can be matched against a core type # type_id = head ? node.kind : node.type_id type_id = node.kind if type_id.empty? 
if type_id =~ /^okay\// type_id, mods = OKAY_TYPE_REGEXP.match( type_id ).to_a[1,2] end if schema.has_key?( type_id ) attr = schema[ type_id ] else raise Okay::Error, "Node of type !#{type_id} invalid at /" + depth.join( "/" ) end if @@type_registry[ type_id ] node_vs_schema( @@type_registry[ type_id ], node, depth, true ) end # # Descend and match types of nodes # if attr attr.each { |prop, prop_schema| # # Mini-paths # if prop =~ /^\// key = $' if key == "*" node.children_with_index.each { |c| node_vs_schema( prop_schema, c[0], depth + [c[1]], false ) } else if node[key] child = node[key] node_vs_schema( prop_schema, child, depth + [key], false ) else unless Array === attr['optional'] and attr['optional'].include?( "/" + key ) raise Okay::Error, "No key '#{key}' found at /" + depth.join( "/" ) end end end end } else end return true end def Okay.make_schema_flexhash( type_root ) type_root = YAML::transfer( 'tag:ruby.yaml.org,2002:flexhash', type_root ) type_root.collect! { |e| if Hash === e[1] e[1].each { |k,v| if k =~ /^\// and Array === v e[1][k] = make_schema_flexhash( v ) end } end e } type_root end Okay.add_type( 'schema' ) { |type, val, modules| val.each { |k,v| v['schema'] = make_schema_flexhash( v['schema'] ) } } end ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/RELEASE0000644000000000000000000000600511672453175021066 0ustar rootrootreleased: { name: Syck, version: 0.54 } for: [ Ruby, PHP, Python ] by: why the lucky stiff about: > Syck is a YAML parser, an extension for scripting languages, written in C. So what is YAML? YAML is a new language for data. Describe objects in plain text. Load the data into your scripting language as arrays, dictionaries, classes, or primitives. links: YAML: http://www.yaml.org/ YAML Cookbook: http://yaml4r.sf.net/cookbook/ YAML Type Repository: http://yaml.org/type/ YAML Specification: http://yaml.org/spec/ Syck: http://www.whytheluckystiff.net/syck/ Syck Benchmarks: http://www.whytheluckystiff.net/arch/2003/03/19 status: > Syck is about 95% compliant with the YAML spec. Largely, small issues remain. The extensions are quite usable. Ruby, PHP and Python can load from a string containing YAML. Ruby has support for stream loading, type handling, YPath, Okay. This release includes an amount of Ruby code comprising the 0.60 release of YAML.rb. benchmarks: > Syck is quite speedy, although not as swift as most language's native serialization. Syck runs at about: 50-60% of the speed of Ruby's Marshal. 65-90% of the speed of PHP's deserialize(). 600% of the speed of Python's Pickle. 50-60% of the speed of Python's cPickle. (Based on various types of structured data.) installation: > Syck contains working extensions for the Ruby, PHP, and Python languages. Each requires compilation of the libsyck library, followed by compilation of the extension. To compile libsyck, first download libsyck. tar xzvf syck-0.54.tar.gz cd syck-0.54 ./configure make sudo make install To install the Ruby extension: cd ext/ruby ruby install.rb config ruby install.rb setup sudo ruby install.rb install To install the Python extension: cd ext/python python setup.py build sudo python setup.py install To install the PHP extension: sh make_module.sh sudo make install (if you weren't root during make_module.sh) php -q syck.php examples: To load this document in Ruby: | ($:~)$ irb >> require 'yaml' => true >> YAML::load( File.open( 'RELEASE' ) ) => {"status"=>"Syck is about 60% compliant ..."} To load this document in PHP: | ($:~)$ php -a Interactive mode enabled .. php then outputs .. 
X-Powered-By: PHP/4.2.3 Content-type: text/html Array ( [released] => Array ( [name] => Syck [version] => 0.54 ) .. and so on .. To load this document in Python: | ($:~)$ python Python 2.1.3 (#1, Jul 11 2002, 17:52:24) [GCC 2.95.3 20010315 (release) [FreeBSD]] on freebsd4 Type "copyright", "credits" or "license" for more information. >>> import syck >>> f = open( 'RELEASE' ) >>> syck.load( f.read() ) {'by': 'why the lucky stiff', ... } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/Makefile.in0000644000000000000000000004354411672453175022141 0ustar rootroot# Makefile.in generated by automake 1.9.5 from Makefile.am. # @configure_input@ # Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, # 2003, 2004, 2005 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ srcdir = @srcdir@ top_srcdir = @top_srcdir@ VPATH = @srcdir@ pkgdatadir = $(datadir)/@PACKAGE@ pkglibdir = $(libdir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ top_builddir = . am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd INSTALL = @INSTALL@ install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : subdir = . DIST_COMMON = README $(am__configure_deps) $(srcdir)/Makefile.am \ $(srcdir)/Makefile.in $(srcdir)/config.h.in \ $(top_srcdir)/configure COPYING TODO config/README \ config/depcomp config/install-sh config/missing ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/configure.in am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) am__CONFIG_DISTCLEAN_FILES = config.status config.cache config.log \ configure.lineno configure.status.lineno mkinstalldirs = $(install_sh) -d CONFIG_HEADER = config.h CONFIG_CLEAN_FILES = SOURCES = DIST_SOURCES = RECURSIVE_TARGETS = all-recursive check-recursive dvi-recursive \ html-recursive info-recursive install-data-recursive \ install-exec-recursive install-info-recursive \ install-recursive installcheck-recursive installdirs-recursive \ pdf-recursive ps-recursive uninstall-info-recursive \ uninstall-recursive ETAGS = etags CTAGS = ctags DIST_SUBDIRS = $(SUBDIRS) DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) distdir = $(PACKAGE)-$(VERSION) top_distdir = $(distdir) am__remove_distdir = \ { test ! -d $(distdir) \ || { find $(distdir) -type d ! -perm -200 -exec chmod u+w {} ';' \ && rm -fr $(distdir); }; } DIST_ARCHIVES = $(distdir).tar.gz GZIP_ENV = --best distuninstallcheck_listfiles = find . -type f -print distcleancheck_listfiles = find . 
-type f -print ACLOCAL = @ACLOCAL@ AMDEP_FALSE = @AMDEP_FALSE@ AMDEP_TRUE = @AMDEP_TRUE@ AMTAR = @AMTAR@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CYGPATH_W = @CYGPATH_W@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ LDFLAGS = @LDFLAGS@ LEX = @LEX@ LEXLIB = @LEXLIB@ LEX_OUTPUT_ROOT = @LEX_OUTPUT_ROOT@ LIBOBJS = @LIBOBJS@ LIBS = @LIBS@ LN_S = @LN_S@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ OBJEXT = @OBJEXT@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ RANLIB = @RANLIB@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ STRIP = @STRIP@ VERSION = @VERSION@ YACC = @YACC@ ac_ct_CC = @ac_ct_CC@ ac_ct_RANLIB = @ac_ct_RANLIB@ ac_ct_STRIP = @ac_ct_STRIP@ am__fastdepCC_FALSE = @am__fastdepCC_FALSE@ am__fastdepCC_TRUE = @am__fastdepCC_TRUE@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ bindir = @bindir@ build_alias = @build_alias@ datadir = @datadir@ exec_prefix = @exec_prefix@ host_alias = @host_alias@ includedir = @includedir@ infodir = @infodir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ oldincludedir = @oldincludedir@ prefix = @prefix@ program_transform_name = @program_transform_name@ sbindir = @sbindir@ sharedstatedir = @sharedstatedir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ # # I feel like saying, "The magic happens here!" But it doesn't. # SUBDIRS = lib tests all: config.h $(MAKE) $(AM_MAKEFLAGS) all-recursive .SUFFIXES: am--refresh: @: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ echo ' cd $(srcdir) && $(AUTOMAKE) --foreign '; \ cd $(srcdir) && $(AUTOMAKE) --foreign \ && exit 0; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign Makefile'; \ cd $(top_srcdir) && \ $(AUTOMAKE) --foreign Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ echo ' $(SHELL) ./config.status'; \ $(SHELL) ./config.status;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) $(SHELL) ./config.status --recheck $(top_srcdir)/configure: $(am__configure_deps) cd $(srcdir) && $(AUTOCONF) $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(srcdir) && $(ACLOCAL) $(ACLOCAL_AMFLAGS) config.h: stamp-h1 @if test ! 
-f $@; then \ rm -f stamp-h1; \ $(MAKE) stamp-h1; \ else :; fi stamp-h1: $(srcdir)/config.h.in $(top_builddir)/config.status @rm -f stamp-h1 cd $(top_builddir) && $(SHELL) ./config.status config.h $(srcdir)/config.h.in: $(am__configure_deps) cd $(top_srcdir) && $(AUTOHEADER) rm -f stamp-h1 touch $@ distclean-hdr: -rm -f config.h stamp-h1 uninstall-info-am: # This directory's subdirectories are mostly independent; you can cd # into them and run `make' without going through this Makefile. # To change the values of `make' variables: instead of editing Makefiles, # (1) if the variable is set in `config.status', edit `config.status' # (which will cause the Makefiles to be regenerated when you run `make'); # (2) otherwise, pass the desired values on the `make' command line. $(RECURSIVE_TARGETS): @failcom='exit 1'; \ for f in x $$MAKEFLAGS; do \ case $$f in \ *=* | --[!k]*);; \ *k*) failcom='fail=yes';; \ esac; \ done; \ dot_seen=no; \ target=`echo $@ | sed s/-recursive//`; \ list='$(SUBDIRS)'; for subdir in $$list; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ dot_seen=yes; \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done; \ if test "$$dot_seen" = "no"; then \ $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ fi; test -z "$$fail" mostlyclean-recursive clean-recursive distclean-recursive \ maintainer-clean-recursive: @failcom='exit 1'; \ for f in x $$MAKEFLAGS; do \ case $$f in \ *=* | --[!k]*);; \ *k*) failcom='fail=yes';; \ esac; \ done; \ dot_seen=no; \ case "$@" in \ distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ *) list='$(SUBDIRS)' ;; \ esac; \ rev=''; for subdir in $$list; do \ if test "$$subdir" = "."; then :; else \ rev="$$subdir $$rev"; \ fi; \ done; \ rev="$$rev ."; \ target=`echo $@ | sed s/-recursive//`; \ for subdir in $$rev; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done && test -z "$$fail" tags-recursive: list='$(SUBDIRS)'; for subdir in $$list; do \ test "$$subdir" = . || (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) tags); \ done ctags-recursive: list='$(SUBDIRS)'; for subdir in $$list; do \ test "$$subdir" = . || (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) ctags); \ done ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES) list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) ' { files[$$0] = 1; } \ END { for (i in files) print i; }'`; \ mkid -fID $$unique tags: TAGS TAGS: tags-recursive $(HEADERS) $(SOURCES) config.h.in $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) tags=; \ here=`pwd`; \ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ include_option=--etags-include; \ empty_fix=.; \ else \ include_option=--include; \ empty_fix=; \ fi; \ list='$(SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test ! 
-f $$subdir/TAGS || \ tags="$$tags $$include_option=$$here/$$subdir/TAGS"; \ fi; \ done; \ list='$(SOURCES) $(HEADERS) config.h.in $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) ' { files[$$0] = 1; } \ END { for (i in files) print i; }'`; \ if test -z "$(ETAGS_ARGS)$$tags$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$tags $$unique; \ fi ctags: CTAGS CTAGS: ctags-recursive $(HEADERS) $(SOURCES) config.h.in $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) tags=; \ here=`pwd`; \ list='$(SOURCES) $(HEADERS) config.h.in $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) ' { files[$$0] = 1; } \ END { for (i in files) print i; }'`; \ test -z "$(CTAGS_ARGS)$$tags$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$tags $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && cd $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) $$here distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) $(am__remove_distdir) mkdir $(distdir) $(mkdir_p) $(distdir)/config @srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's|.|.|g'`; \ list='$(DISTFILES)'; for file in $$list; do \ case $$file in \ $(srcdir)/*) file=`echo "$$file" | sed "s|^$$srcdirstrip/||"`;; \ $(top_srcdir)/*) file=`echo "$$file" | sed "s|^$$topsrcdirstrip/|$(top_builddir)/|"`;; \ esac; \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ dir=`echo "$$file" | sed -e 's,/[^/]*$$,,'`; \ if test "$$dir" != "$$file" && test "$$dir" != "."; then \ dir="/$$dir"; \ $(mkdir_p) "$(distdir)$$dir"; \ else \ dir=''; \ fi; \ if test -d $$d/$$file; then \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -pR $(srcdir)/$$file $(distdir)$$dir || exit 1; \ fi; \ cp -pR $$d/$$file $(distdir)$$dir || exit 1; \ else \ test -f $(distdir)/$$file \ || cp -p $$d/$$file $(distdir)/$$file \ || exit 1; \ fi; \ done list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test -d "$(distdir)/$$subdir" \ || $(mkdir_p) "$(distdir)/$$subdir" \ || exit 1; \ distdir=`$(am__cd) $(distdir) && pwd`; \ top_distdir=`$(am__cd) $(top_distdir) && pwd`; \ (cd $$subdir && \ $(MAKE) $(AM_MAKEFLAGS) \ top_distdir="$$top_distdir" \ distdir="$$distdir/$$subdir" \ distdir) \ || exit 1; \ fi; \ done -find $(distdir) -type d ! -perm -777 -exec chmod a+rwx {} \; -o \ ! -type d ! -perm -444 -links 1 -exec chmod a+r {} \; -o \ ! -type d ! -perm -400 -exec chmod a+r {} \; -o \ ! -type d ! -perm -444 -exec $(SHELL) $(install_sh) -c -m a+r {} {} \; \ || chmod -R a+r $(distdir) dist-gzip: distdir tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz $(am__remove_distdir) dist-bzip2: distdir tardir=$(distdir) && $(am__tar) | bzip2 -9 -c >$(distdir).tar.bz2 $(am__remove_distdir) dist-tarZ: distdir tardir=$(distdir) && $(am__tar) | compress -c >$(distdir).tar.Z $(am__remove_distdir) dist-shar: distdir shar $(distdir) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).shar.gz $(am__remove_distdir) dist-zip: distdir -rm -f $(distdir).zip zip -rq $(distdir).zip $(distdir) $(am__remove_distdir) dist dist-all: distdir tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz $(am__remove_distdir) # This target untars the dist file and tries a VPATH configuration. 
Then # it guarantees that the distribution is self-contained by making another # tarfile. distcheck: dist case '$(DIST_ARCHIVES)' in \ *.tar.gz*) \ GZIP=$(GZIP_ENV) gunzip -c $(distdir).tar.gz | $(am__untar) ;;\ *.tar.bz2*) \ bunzip2 -c $(distdir).tar.bz2 | $(am__untar) ;;\ *.tar.Z*) \ uncompress -c $(distdir).tar.Z | $(am__untar) ;;\ *.shar.gz*) \ GZIP=$(GZIP_ENV) gunzip -c $(distdir).shar.gz | unshar ;;\ *.zip*) \ unzip $(distdir).zip ;;\ esac chmod -R a-w $(distdir); chmod a+w $(distdir) mkdir $(distdir)/_build mkdir $(distdir)/_inst chmod a-w $(distdir) dc_install_base=`$(am__cd) $(distdir)/_inst && pwd | sed -e 's,^[^:\\/]:[\\/],/,'` \ && dc_destdir="$${TMPDIR-/tmp}/am-dc-$$$$/" \ && cd $(distdir)/_build \ && ../configure --srcdir=.. --prefix="$$dc_install_base" \ $(DISTCHECK_CONFIGURE_FLAGS) \ && $(MAKE) $(AM_MAKEFLAGS) \ && $(MAKE) $(AM_MAKEFLAGS) dvi \ && $(MAKE) $(AM_MAKEFLAGS) check \ && $(MAKE) $(AM_MAKEFLAGS) install \ && $(MAKE) $(AM_MAKEFLAGS) installcheck \ && $(MAKE) $(AM_MAKEFLAGS) uninstall \ && $(MAKE) $(AM_MAKEFLAGS) distuninstallcheck_dir="$$dc_install_base" \ distuninstallcheck \ && chmod -R a-w "$$dc_install_base" \ && ({ \ (cd ../.. && umask 077 && mkdir "$$dc_destdir") \ && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" install \ && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" uninstall \ && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" \ distuninstallcheck_dir="$$dc_destdir" distuninstallcheck; \ } || { rm -rf "$$dc_destdir"; exit 1; }) \ && rm -rf "$$dc_destdir" \ && $(MAKE) $(AM_MAKEFLAGS) dist \ && rm -rf $(DIST_ARCHIVES) \ && $(MAKE) $(AM_MAKEFLAGS) distcleancheck $(am__remove_distdir) @(echo "$(distdir) archives ready for distribution: "; \ list='$(DIST_ARCHIVES)'; for i in $$list; do echo $$i; done) | \ sed -e '1{h;s/./=/g;p;x;}' -e '$${p;x;}' distuninstallcheck: @cd $(distuninstallcheck_dir) \ && test `$(distuninstallcheck_listfiles) | wc -l` -le 1 \ || { echo "ERROR: files left after uninstall:" ; \ if test -n "$(DESTDIR)"; then \ echo " (check DESTDIR support)"; \ fi ; \ $(distuninstallcheck_listfiles) ; \ exit 1; } >&2 distcleancheck: distclean @if test '$(srcdir)' = . ; then \ echo "ERROR: distcleancheck can only run from a VPATH build" ; \ exit 1 ; \ fi @test `$(distcleancheck_listfiles) | wc -l` -eq 0 \ || { echo "ERROR: files left in build directory after distclean:" ; \ $(distcleancheck_listfiles) ; \ exit 1; } >&2 check-am: all-am check: check-recursive all-am: Makefile config.h installdirs: installdirs-recursive installdirs-am: install: install-recursive install-exec: install-exec-recursive install-data: install-data-recursive uninstall: uninstall-recursive install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-recursive install-strip: $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ `test -z '$(STRIP)' || \ echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-recursive clean-am: clean-generic mostlyclean-am distclean: distclean-recursive -rm -f $(am__CONFIG_DISTCLEAN_FILES) -rm -f Makefile distclean-am: clean-am distclean-generic distclean-hdr distclean-tags dvi: dvi-recursive dvi-am: html: html-recursive info: info-recursive info-am: install-data-am: install-exec-am: install-info: install-info-recursive install-man: installcheck-am: maintainer-clean: maintainer-clean-recursive -rm -f $(am__CONFIG_DISTCLEAN_FILES) -rm -rf $(top_srcdir)/autom4te.cache -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-recursive mostlyclean-am: mostlyclean-generic pdf: pdf-recursive pdf-am: ps: ps-recursive ps-am: uninstall-am: uninstall-info-am uninstall-info: uninstall-info-recursive .PHONY: $(RECURSIVE_TARGETS) CTAGS GTAGS all all-am am--refresh check \ check-am clean clean-generic clean-recursive ctags \ ctags-recursive dist dist-all dist-bzip2 dist-gzip dist-shar \ dist-tarZ dist-zip distcheck distclean distclean-generic \ distclean-hdr distclean-recursive distclean-tags \ distcleancheck distdir distuninstallcheck dvi dvi-am html \ html-am info info-am install install-am install-data \ install-data-am install-exec install-exec-am install-info \ install-info-am install-man install-strip installcheck \ installcheck-am installdirs installdirs-am maintainer-clean \ maintainer-clean-generic maintainer-clean-recursive \ mostlyclean mostlyclean-generic mostlyclean-recursive pdf \ pdf-am ps ps-am tags tags-recursive uninstall uninstall-am \ uninstall-info-am # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/tests/0000755000000000000000000000000011672453175021224 5ustar rootrootruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/tests/CuTest.h0000644000000000000000000000412111672453175022602 0ustar rootroot#ifndef CU_TEST_H #define CU_TEST_H #include #include /* CuString */ char* CuStrAlloc(int size); char* CuStrCopy(char* old); #define CU_ALLOC(TYPE) ((TYPE*) malloc(sizeof(TYPE))) #define HUGE_STRING_LEN 8192 #define STRING_MAX 256 #define STRING_INC 256 typedef struct { int length; int size; char* buffer; } CuString; void CuStringInit(CuString* str); CuString* CuStringNew(void); void CuStringRead(CuString* str, char* path); void CuStringAppend(CuString* str, char* text); void CuStringAppendLen(CuString* str, char* text, long length); void CuStringAppendChar(CuString* str, char ch); void CuStringAppendFormat(CuString* str, char* format, ...); void CuStringResize(CuString* str, int newSize); /* CuTest */ typedef struct CuTest CuTest; typedef void (*TestFunction)(CuTest *); struct CuTest { char* name; TestFunction function; int failed; int ran; char* message; jmp_buf *jumpBuf; }; void CuTestInit(CuTest* t, char* name, TestFunction function); CuTest* CuTestNew(char* name, TestFunction function); void CuFail(CuTest* tc, char* message); void CuAssert(CuTest* tc, char* message, int condition); void CuAssertTrue(CuTest* tc, int condition); void CuAssertStrEquals(CuTest* tc, char* expected, char* actual); void CuAssertIntEquals(CuTest* tc, int expected, int actual); void CuAssertPtrEquals(CuTest* tc, void* expected, void* actual); void CuAssertPtrNotNull(CuTest* tc, void* pointer); void CuTestRun(CuTest* tc); /* CuSuite */ #define MAX_TEST_CASES 1024 #define SUITE_ADD_TEST(SUITE,TEST) CuSuiteAdd(SUITE, CuTestNew(#TEST, TEST)) typedef struct { int count; CuTest* 
list[MAX_TEST_CASES]; int failCount; } CuSuite; void CuSuiteInit(CuSuite* testSuite); CuSuite* CuSuiteNew(); void CuSuiteAdd(CuSuite* testSuite, CuTest *testCase); void CuSuiteAddSuite(CuSuite* testSuite, CuSuite* testSuite2); void CuSuiteRun(CuSuite* testSuite); void CuSuiteSummary(CuSuite* testSuite, CuString* summary); void CuSuiteDetails(CuSuite* testSuite, CuString* details); #endif /* CU_TEST_H */ ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/tests/Emit.c0000644000000000000000000000512511672453175022271 0ustar rootroot// // Emit.c // // $Author: why $ // $Date: 2005/01/06 07:23:51 $ // // Copyright (C) 2003 why the lucky stiff // #include #include "syck.h" #include "CuTest.h" // // 1. Test the buffering -- print 10 bytes at a time // void TestSyckEmit_Output( SyckEmitter *e, char *str, long len ) { // char *tmp = syck_strndup( str, len ); // printf( "OUT: %s [%d]\n", tmp, len ); // S_FREE( tmp ); } void TestSyckEmit( CuTest *tc ) { SyckEmitter *emitter; char *tmp; int len = 0; emitter = syck_new_emitter(); emitter->bufsize = 10; emitter->output_handler = TestSyckEmit_Output; syck_emitter_write( emitter, "Test [1]", 8 ); syck_emitter_write( emitter, ".", 1 ); syck_emitter_write( emitter, "Test [2]", 8 ); syck_emitter_write( emitter, ".", 1 ); syck_emitter_write( emitter, "Test [3]", 8 ); syck_emitter_write( emitter, ".", 1 ); syck_emitter_write( emitter, "Test [4]", 8 ); syck_emitter_write( emitter, ".", 1 ); syck_emitter_write( emitter, "END!", 4 ); syck_free_emitter( emitter ); } /* * Ensure that our base64 encoder can do some basic * binary encoding. */ void TestBase64Encode( CuTest *tc ) { char gif[] = "GIF89a\f\000\f\000\204\000\000\377\377\367\365\365\356\351\351\345fff\000\000\000\347\347\347^^^\363\363\355\216\216\216\340\340\340\237\237\237\223\223\223\247\247\247\236\236\236iiiccc\243\243\243\204\204\204\377\376\371\377\376\371\377\376\371\377\376\371\377\376\371\377\376\371\377\376\371\377\376\371\377\376\371\377\376\371\377\376\371\377\376\371\377\376\371\377\376\371!\376\016Made with GIMP\000,\000\000\000\000\f\000\f\000\000\005, \216\2010\236\343@\024\350i\020\304\321\212\010\034\317\200M$z\357\3770\205p\270\2601f\r\e\316\001\303\001\036\020' \202\n\001\000;"; char *enc = syck_base64enc( gif, 185 ); CuAssertStrEquals( tc, enc, "R0lGODlhDAAMAIQAAP//9/X17unp5WZmZgAAAOfn515eXvPz7Y6OjuDg4J+fn5OTk6enp56enmlpaWNjY6Ojo4SEhP/++f/++f/++f/++f/++f/++f/++f/++f/++f/++f/++f/++f/++f/++SH+Dk1hZGUgd2l0aCBHSU1QACwAAAAADAAMAAAFLCAgjoEwnuNAFOhpEMTRiggcz4BNJHrv/zCFcLiwMWYNG84BwwEeECcgggoBADs=\n" ); S_FREE( enc ); } CuSuite * SyckGetSuite() { CuSuite *suite = CuSuiteNew(); SUITE_ADD_TEST( suite, TestSyckEmit ); SUITE_ADD_TEST( suite, TestBase64Encode ); return suite; } int main(void) { CuString *output = CuStringNew(); CuSuite* suite = SyckGetSuite(); int count; CuSuiteRun(suite); CuSuiteSummary(suite, output); CuSuiteDetails(suite, output); printf("%s\n", output->buffer); count = suite->failCount; CuStringFree( output ); CuSuiteFree( suite ); return count; } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/tests/YTS.c0000644000000000000000000014363211672453175022060 0ustar rootroot// // YTS.c // // $Author: why $ // $Date: 2005/04/13 06:27:54 $ // // Copyright (C) 2004 why the lucky stiff // // Well, this is the Yaml Testing Suite in the form of a plain C // API. Basically, this is as good as C integration gets for Syck. // You've got to have a symbol table around. From there, you can // query your data. 
// #include #include "syck.h" #include "CuTest.h" /* YAML test node structures */ #define T_STR 10 #define T_SEQ 20 #define T_MAP 30 #define T_END 40 #define ILEN 2 struct test_node { int type; char *tag; char *key; struct test_node *value; }; struct test_node end_node = { T_END }; /* * Assertion which compares a YAML document with an * equivalent set of test_node structs. */ SYMID syck_copy_handler(p, n) SyckParser *p; SyckNode *n; { int i = 0; struct test_node *tn = S_ALLOC_N( struct test_node, 1 ); switch ( n->kind ) { case syck_str_kind: tn->type = T_STR; tn->key = syck_strndup( n->data.str->ptr, n->data.str->len ); tn->value = 0; break; case syck_seq_kind: { struct test_node *val; struct test_node *seq = S_ALLOC_N( struct test_node, n->data.list->idx + 1 ); tn->type = T_SEQ; tn->key = 0; for ( i = 0; i < n->data.list->idx; i++ ) { SYMID oid = syck_seq_read( n, i ); syck_lookup_sym( p, oid, (char **)&val ); seq[i] = val[0]; } seq[n->data.list->idx] = end_node; tn->value = seq; } break; case syck_map_kind: { struct test_node *val; struct test_node *map = S_ALLOC_N( struct test_node, ( n->data.pairs->idx * 2 ) + 1 ); tn->type = T_MAP; tn->key = 0; for ( i = 0; i < n->data.pairs->idx; i++ ) { SYMID oid = syck_map_read( n, map_key, i ); syck_lookup_sym( p, oid, (char **)&val ); map[i * 2] = val[0]; oid = syck_map_read( n, map_value, i ); syck_lookup_sym( p, oid, (char **)&val ); map[(i * 2) + 1] = val[0]; } map[n->data.pairs->idx * 2] = end_node; tn->value = map; } break; } tn->tag = 0; if ( n->type_id != NULL ) { tn->tag = syck_strndup( n->type_id, strlen( n->type_id ) ); } return syck_add_sym( p, (char *) tn ); } int syck_free_copies( char *key, struct test_node *tn, char *arg ) { if ( tn != NULL ) { switch ( tn->type ) { case T_STR: S_FREE( tn->key ); break; case T_SEQ: case T_MAP: S_FREE( tn->value ); break; } if ( tn->tag != NULL ) S_FREE( tn->tag ); S_FREE( tn ); } tn = NULL; return ST_CONTINUE; } void CuStreamCompareX( CuTest* tc, struct test_node *s1, struct test_node *s2 ) { int i = 0; while ( 1 ) { CuAssertIntEquals( tc, s1[i].type, s2[i].type ); if ( s1[i].type == T_END ) return; if ( s1[i].tag != 0 && s2[i].tag != 0 ) CuAssertStrEquals( tc, s1[i].tag, s2[i].tag ); switch ( s1[i].type ) { case T_STR: CuAssertStrEquals( tc, s1[i].key, s2[i].key ); break; case T_SEQ: case T_MAP: CuStreamCompareX( tc, s1[i].value, s2[i].value ); break; } i++; } } void CuStreamCompare( CuTest* tc, char *yaml, struct test_node *stream ) { int doc_ct = 0; struct test_node *ystream = S_ALLOC_N( struct test_node, doc_ct + 1 ); CuString *msg; /* Set up parser */ SyckParser *parser = syck_new_parser(); syck_parser_str_auto( parser, yaml, NULL ); syck_parser_handler( parser, syck_copy_handler ); syck_parser_error_handler( parser, NULL ); syck_parser_implicit_typing( parser, 1 ); syck_parser_taguri_expansion( parser, 1 ); /* Parse all streams */ while ( 1 ) { struct test_node *ydoc; SYMID oid = syck_parse( parser ); if ( parser->eof == 1 ) break; /* Add document to stream */ syck_lookup_sym( parser, oid, (char **)&ydoc ); ystream[doc_ct] = ydoc[0]; doc_ct++; S_REALLOC_N( ystream, struct test_node, doc_ct + 1 ); } ystream[doc_ct] = end_node; /* Traverse the struct and the symbol table side-by-side */ /* DEBUG: y( stream, 0 ); y( ystream, 0 ); */ CuStreamCompareX( tc, stream, ystream ); /* Free the node tables and the parser */ S_FREE( ystream ); if ( parser->syms != NULL ) st_foreach( parser->syms, syck_free_copies, 0 ); syck_free_parser( parser ); } /* * Setup for testing N->Y->N. 
*/ void test_output_handler( emitter, str, len ) SyckEmitter *emitter; char *str; long len; { CuString *dest = (CuString *)emitter->bonus; CuStringAppendLen( dest, str, len ); } SYMID build_symbol_table( SyckEmitter *emitter, struct test_node *node ) { switch ( node->type ) { case T_SEQ: case T_MAP: { int i = 0; while ( node->value[i].type != T_END ) { SYMID id = build_symbol_table( emitter, &node->value[i] ); i++; } } return syck_emitter_mark_node( emitter, (st_data_t)node ); case T_STR: return 0; } } void test_emitter_handler( SyckEmitter *emitter, st_data_t data ) { struct test_node *node = (struct test_node *)data; switch ( node->type ) { case T_STR: syck_emit_scalar( emitter, node->tag, scalar_none, 0, 0, 0, node->key, strlen( node->key ) ); break; case T_SEQ: { int i = 0; syck_emit_seq( emitter, node->tag, 1 ); while ( node->value[i].type != T_END ) { syck_emit_item( emitter, (st_data_t)&node->value[i] ); i++; } syck_emit_end( emitter ); } break; case T_MAP: { int i = 0; syck_emit_map( emitter, node->tag, 1 ); while ( node->value[i].type != T_END ) { syck_emit_item( emitter, (st_data_t)&node->value[i] ); i++; } syck_emit_end( emitter ); } break; } } void CuRoundTrip( CuTest* tc, struct test_node *stream ) { int i = 0; CuString *cs = CuStringNew(); SyckEmitter *emitter = syck_new_emitter(); /* Calculate anchors and tags */ build_symbol_table( emitter, stream ); /* Build the stream */ syck_output_handler( emitter, test_output_handler ); syck_emitter_handler( emitter, test_emitter_handler ); emitter->bonus = cs; while ( stream[i].type != T_END ) { syck_emit( emitter, (st_data_t)&stream[i] ); syck_emitter_flush( emitter, 0 ); i++; } /* Reload the stream and compare */ /* printf( "-- output for %s --\n%s\n--- end of output --\n", tc->name, cs->buffer ); */ CuStreamCompare( tc, cs->buffer, stream ); CuStringFree( cs ); syck_free_emitter( emitter ); } /* * ACTUAL TESTS FOR THE YAML TESTING SUITE BEGIN HERE * (EVERYTHING PREVIOUS WAS SET UP FOR THE TESTS) */ /* * Example 2.1: Sequence of scalars */ void YtsSpecificationExamples_0( CuTest *tc ) { struct test_node seq[] = { { T_STR, 0, "Mark McGwire" }, { T_STR, 0, "Sammy Sosa" }, { T_STR, 0, "Ken Griffey" }, end_node }; struct test_node stream[] = { { T_SEQ, 0, 0, seq }, end_node }; CuStreamCompare( tc, /* YAML document */ "- Mark McGwire \n" "- Sammy Sosa \n" "- Ken Griffey \n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example 2.2: Mapping of scalars to scalars */ void YtsSpecificationExamples_1( CuTest *tc ) { struct test_node map[] = { { T_STR, 0, "hr" }, { T_STR, 0, "65" }, { T_STR, 0, "avg" }, { T_STR, 0, "0.278" }, { T_STR, 0, "rbi" }, { T_STR, 0, "147" }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; CuStreamCompare( tc, /* YAML document */ "hr: 65 \n" "avg: 0.278 \n" "rbi: 147 \n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example 2.3: Mapping of scalars to sequences */ void YtsSpecificationExamples_2( CuTest *tc ) { struct test_node seq1[] = { { T_STR, 0, "Boston Red Sox" }, { T_STR, 0, "Detroit Tigers" }, { T_STR, 0, "New York Yankees" }, end_node }; struct test_node seq2[] = { { T_STR, 0, "New York Mets" }, { T_STR, 0, "Chicago Cubs" }, { T_STR, 0, "Atlanta Braves" }, end_node }; struct test_node map[] = { { T_STR, 0, "american" }, { T_SEQ, 0, 0, seq1 }, { T_STR, 0, "national" }, { T_SEQ, 0, 0, seq2 }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; CuStreamCompare( tc, /* YAML document */ "american: 
\n" " - Boston Red Sox \n" " - Detroit Tigers \n" " - New York Yankees \n" "national: \n" " - New York Mets \n" " - Chicago Cubs \n" " - Atlanta Braves \n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example 2.4: Sequence of mappings */ void YtsSpecificationExamples_3( CuTest *tc ) { struct test_node map1[] = { { T_STR, 0, "name" }, { T_STR, 0, "Mark McGwire" }, { T_STR, 0, "hr" }, { T_STR, 0, "65" }, { T_STR, 0, "avg" }, { T_STR, 0, "0.278" }, end_node }; struct test_node map2[] = { { T_STR, 0, "name" }, { T_STR, 0, "Sammy Sosa" }, { T_STR, 0, "hr" }, { T_STR, 0, "63" }, { T_STR, 0, "avg" }, { T_STR, 0, "0.288" }, end_node }; struct test_node seq[] = { { T_MAP, 0, 0, map1 }, { T_MAP, 0, 0, map2 }, end_node }; struct test_node stream[] = { { T_SEQ, 0, 0, seq }, end_node }; CuStreamCompare( tc, /* YAML document */ "- \n" " name: Mark McGwire \n" " hr: 65 \n" " avg: 0.278 \n" "- \n" " name: Sammy Sosa \n" " hr: 63 \n" " avg: 0.288 \n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example legacy_A5: Legacy A5 */ void YtsSpecificationExamples_4( CuTest *tc ) { struct test_node seq1[] = { { T_STR, 0, "New York Yankees" }, { T_STR, 0, "Atlanta Braves" }, end_node }; struct test_node seq2[] = { { T_STR, 0, "2001-07-02" }, { T_STR, 0, "2001-08-12" }, { T_STR, 0, "2001-08-14" }, end_node }; struct test_node seq3[] = { { T_STR, 0, "Detroit Tigers" }, { T_STR, 0, "Chicago Cubs" }, end_node }; struct test_node seq4[] = { { T_STR, 0, "2001-07-23" }, end_node }; struct test_node map[] = { { T_SEQ, 0, 0, seq1 }, { T_SEQ, 0, 0, seq2 }, { T_SEQ, 0, 0, seq3 }, { T_SEQ, 0, 0, seq4 }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; CuStreamCompare( tc, /* YAML document */ "? \n" " - New York Yankees \n" " - Atlanta Braves \n" ": \n" " - 2001-07-02 \n" " - 2001-08-12 \n" " - 2001-08-14 \n" "? 
\n" " - Detroit Tigers \n" " - Chicago Cubs \n" ": \n" " - 2001-07-23 \n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example 2.5: Sequence of sequences */ void YtsSpecificationExamples_5( CuTest *tc ) { struct test_node seq1[] = { { T_STR, 0, "name" }, { T_STR, 0, "hr" }, { T_STR, 0, "avg" }, end_node }; struct test_node seq2[] = { { T_STR, 0, "Mark McGwire" }, { T_STR, 0, "65" }, { T_STR, 0, "0.278" }, end_node }; struct test_node seq3[] = { { T_STR, 0, "Sammy Sosa" }, { T_STR, 0, "63" }, { T_STR, 0, "0.288" }, end_node }; struct test_node seq[] = { { T_SEQ, 0, 0, seq1 }, { T_SEQ, 0, 0, seq2 }, { T_SEQ, 0, 0, seq3 }, end_node }; struct test_node stream[] = { { T_SEQ, 0, 0, seq }, end_node }; CuStreamCompare( tc, /* YAML document */ "- [ name , hr , avg ] \n" "- [ Mark McGwire , 65 , 0.278 ] \n" "- [ Sammy Sosa , 63 , 0.288 ] \n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example 2.6: Mapping of mappings */ void YtsSpecificationExamples_6( CuTest *tc ) { struct test_node map1[] = { { T_STR, 0, "hr" }, { T_STR, 0, "65" }, { T_STR, 0, "avg" }, { T_STR, 0, "0.278" }, end_node }; struct test_node map2[] = { { T_STR, 0, "hr" }, { T_STR, 0, "63" }, { T_STR, 0, "avg" }, { T_STR, 0, "0.288" }, end_node }; struct test_node map[] = { { T_STR, 0, "Mark McGwire" }, { T_MAP, 0, 0, map1 }, { T_STR, 0, "Sammy Sosa" }, { T_MAP, 0, 0, map2 }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; CuStreamCompare( tc, /* YAML document */ "Mark McGwire: {hr: 65, avg: 0.278}\n" "Sammy Sosa: {\n" " hr: 63,\n" " avg: 0.288\n" " }\n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example 2.7: Two documents in a stream each with a leading comment */ void YtsSpecificationExamples_7( CuTest *tc ) { struct test_node seq1[] = { { T_STR, 0, "Mark McGwire" }, { T_STR, 0, "Sammy Sosa" }, { T_STR, 0, "Ken Griffey" }, end_node }; struct test_node seq2[] = { { T_STR, 0, "Chicago Cubs" }, { T_STR, 0, "St Louis Cardinals" }, end_node }; struct test_node stream[] = { { T_SEQ, 0, 0, seq1 }, { T_SEQ, 0, 0, seq2 }, end_node }; CuStreamCompare( tc, /* YAML document */ "# Ranking of 1998 home runs\n" "---\n" "- Mark McGwire\n" "- Sammy Sosa\n" "- Ken Griffey\n" "\n" "# Team ranking\n" "---\n" "- Chicago Cubs\n" "- St Louis Cardinals\n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example 2.8: Play by play feed from a game */ void YtsSpecificationExamples_8( CuTest *tc ) { struct test_node map1[] = { { T_STR, 0, "time" }, { T_STR, 0, "20:03:20" }, { T_STR, 0, "player" }, { T_STR, 0, "Sammy Sosa" }, { T_STR, 0, "action" }, { T_STR, 0, "strike (miss)" }, end_node }; struct test_node map2[] = { { T_STR, 0, "time" }, { T_STR, 0, "20:03:47" }, { T_STR, 0, "player" }, { T_STR, 0, "Sammy Sosa" }, { T_STR, 0, "action" }, { T_STR, 0, "grand slam" }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map1 }, { T_MAP, 0, 0, map2 }, end_node }; CuStreamCompare( tc, /* YAML document */ "---\n" "time: 20:03:20\n" "player: Sammy Sosa\n" "action: strike (miss)\n" "...\n" "---\n" "time: 20:03:47\n" "player: Sammy Sosa\n" "action: grand slam\n" "...\n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example 2.9: Single document with two comments */ void YtsSpecificationExamples_9( CuTest *tc ) { struct test_node seq1[] = { { T_STR, 0, "Mark McGwire" }, { T_STR, 0, "Sammy Sosa" }, end_node }; struct test_node seq2[] = { { T_STR, 0, "Sammy Sosa" }, { T_STR, 
0, "Ken Griffey" }, end_node }; struct test_node map[] = { { T_STR, 0, "hr" }, { T_SEQ, 0, 0, seq1 }, { T_STR, 0, "rbi" }, { T_SEQ, 0, 0, seq2 }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; CuStreamCompare( tc, /* YAML document */ "hr: # 1998 hr ranking \n" " - Mark McGwire \n" " - Sammy Sosa \n" "rbi: \n" " # 1998 rbi ranking \n" " - Sammy Sosa \n" " - Ken Griffey \n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example 2.1: Node for Sammy Sosa appears twice in this document */ void YtsSpecificationExamples_10( CuTest *tc ) { struct test_node seq1[] = { { T_STR, 0, "Mark McGwire" }, { T_STR, 0, "Sammy Sosa" }, end_node }; struct test_node seq2[] = { { T_STR, 0, "Sammy Sosa" }, { T_STR, 0, "Ken Griffey" }, end_node }; struct test_node map[] = { { T_STR, 0, "hr" }, { T_SEQ, 0, 0, seq1 }, { T_STR, 0, "rbi" }, { T_SEQ, 0, 0, seq2 }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; CuStreamCompare( tc, /* YAML document */ "---\n" "hr: \n" " - Mark McGwire \n" " # Following node labeled SS \n" " - &SS Sammy Sosa \n" "rbi: \n" " - *SS # Subsequent occurance \n" " - Ken Griffey \n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example 2.11: Mapping between sequences */ void YtsSpecificationExamples_11( CuTest *tc ) { struct test_node seq1[] = { { T_STR, 0, "New York Yankees" }, { T_STR, 0, "Atlanta Braves" }, end_node }; struct test_node seq2[] = { { T_STR, 0, "2001-07-02" }, { T_STR, 0, "2001-08-12" }, { T_STR, 0, "2001-08-14" }, end_node }; struct test_node seq3[] = { { T_STR, 0, "Detroit Tigers" }, { T_STR, 0, "Chicago Cubs" }, end_node }; struct test_node seq4[] = { { T_STR, 0, "2001-07-23" }, end_node }; struct test_node map[] = { { T_SEQ, 0, 0, seq3 }, { T_SEQ, 0, 0, seq4 }, { T_SEQ, 0, 0, seq1 }, { T_SEQ, 0, 0, seq2 }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; CuStreamCompare( tc, /* YAML document */ "? # PLAY SCHEDULE \n" " - Detroit Tigers \n" " - Chicago Cubs \n" ": \n" " - 2001-07-23 \n" "\n" "? 
[ New York Yankees, \n" " Atlanta Braves ] \n" ": [ 2001-07-02, 2001-08-12, \n" " 2001-08-14 ] \n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example 2.12: Sequence key shortcut */ void YtsSpecificationExamples_12( CuTest *tc ) { struct test_node map1[] = { { T_STR, 0, "item" }, { T_STR, 0, "Super Hoop" }, { T_STR, 0, "quantity" }, { T_STR, 0, "1" }, end_node }; struct test_node map2[] = { { T_STR, 0, "item" }, { T_STR, 0, "Basketball" }, { T_STR, 0, "quantity" }, { T_STR, 0, "4" }, end_node }; struct test_node map3[] = { { T_STR, 0, "item" }, { T_STR, 0, "Big Shoes" }, { T_STR, 0, "quantity" }, { T_STR, 0, "1" }, end_node }; struct test_node seq[] = { { T_MAP, 0, 0, map1 }, { T_MAP, 0, 0, map2 }, { T_MAP, 0, 0, map3 }, end_node }; struct test_node stream[] = { { T_SEQ, 0, 0, seq }, end_node }; CuStreamCompare( tc, /* YAML document */ "---\n" "# products purchased\n" "- item : Super Hoop\n" " quantity: 1\n" "- item : Basketball\n" " quantity: 4\n" "- item : Big Shoes\n" " quantity: 1\n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example 2.13: Literal perserves newlines */ void YtsSpecificationExamples_13( CuTest *tc ) { struct test_node stream[] = { { T_STR, 0, "\\//||\\/||\n// || ||_\n" }, end_node }; CuStreamCompare( tc, /* YAML document */ "# ASCII Art\n" "--- | \n" " \\//||\\/||\n" " // || ||_\n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example 2.14: Folded treats newlines as a space */ void YtsSpecificationExamples_14( CuTest *tc ) { struct test_node stream[] = { { T_STR, 0, "Mark McGwire's year was crippled by a knee injury." }, end_node }; CuStreamCompare( tc, /* YAML document */ "---\n" " Mark McGwire's\n" " year was crippled\n" " by a knee injury.\n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example 2.15: Newlines preserved for indented and blank lines */ void YtsSpecificationExamples_15( CuTest *tc ) { struct test_node stream[] = { { T_STR, 0, "Sammy Sosa completed another fine season with great stats.\n\n 63 Home Runs\n 0.288 Batting Average\n\nWhat a year!\n" }, end_node }; CuStreamCompare( tc, /* YAML document */ "--- > \n" " Sammy Sosa completed another\n" " fine season with great stats.\n" "\n" " 63 Home Runs\n" " 0.288 Batting Average\n" "\n" " What a year!\n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example 2.16: Indentation determines scope */ void YtsSpecificationExamples_16( CuTest *tc ) { struct test_node map[] = { { T_STR, 0, "name" }, { T_STR, 0, "Mark McGwire" }, { T_STR, 0, "accomplishment" }, { T_STR, 0, "Mark set a major league home run record in 1998.\n" }, { T_STR, 0, "stats" }, { T_STR, 0, "65 Home Runs\n0.278 Batting Average\n" }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; CuStreamCompare( tc, /* YAML document */ "name: Mark McGwire \n" "accomplishment: > \n" " Mark set a major league\n" " home run record in 1998.\n" "stats: | \n" " 65 Home Runs\n" " 0.278 Batting Average\n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example 2.18: Multiline flow scalars */ void YtsSpecificationExamples_18( CuTest *tc ) { struct test_node map[] = { { T_STR, 0, "plain" }, { T_STR, 0, "This unquoted scalar spans many lines." 
}, { T_STR, 0, "quoted" }, { T_STR, 0, "So does this quoted scalar.\n" }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; CuStreamCompare( tc, /* YAML document */ "plain:\n" " This unquoted scalar\n" " spans many lines.\n" "\n" "quoted: \"So does this\n" " quoted scalar.\\n\"\n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example 2.19: Integers */ void YtsSpecificationExamples_19( CuTest *tc ) { struct test_node map[] = { { T_STR, 0, "canonical" }, { T_STR, 0, "12345" }, { T_STR, 0, "decimal" }, { T_STR, 0, "+12,345" }, { T_STR, 0, "sexagecimal" }, { T_STR, 0, "3:25:45" }, { T_STR, 0, "octal" }, { T_STR, 0, "014" }, { T_STR, 0, "hexadecimal" }, { T_STR, 0, "0xC" }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; CuStreamCompare( tc, /* YAML document */ "canonical: 12345 \n" "decimal: +12,345 \n" "sexagecimal: 3:25:45\n" "octal: 014 \n" "hexadecimal: 0xC \n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example 2.2: Floating point */ void YtsSpecificationExamples_20( CuTest *tc ) { struct test_node map[] = { { T_STR, 0, "canonical" }, { T_STR, 0, "1.23015e+3" }, { T_STR, 0, "exponential" }, { T_STR, 0, "12.3015e+02" }, { T_STR, 0, "sexagecimal" }, { T_STR, 0, "20:30.15" }, { T_STR, 0, "fixed" }, { T_STR, 0, "1,230.15" }, { T_STR, 0, "negative infinity" }, { T_STR, 0, "-.inf" }, { T_STR, 0, "not a number" }, { T_STR, 0, ".NaN" }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; CuStreamCompare( tc, /* YAML document */ "canonical: 1.23015e+3 \n" "exponential: 12.3015e+02 \n" "sexagecimal: 20:30.15\n" "fixed: 1,230.15 \n" "negative infinity: -.inf\n" "not a number: .NaN \n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example 2.21: Miscellaneous */ void YtsSpecificationExamples_21( CuTest *tc ) { struct test_node map[] = { { T_STR, 0, "null" }, { T_STR, 0, "~" }, { T_STR, 0, "true" }, { T_STR, 0, "y" }, { T_STR, 0, "false" }, { T_STR, 0, "n" }, { T_STR, 0, "string" }, { T_STR, 0, "12345" }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; CuStreamCompare( tc, /* YAML document */ "null: ~ \n" "true: y\n" "false: n \n" "string: '12345' \n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example 2.22: Timestamps */ void YtsSpecificationExamples_22( CuTest *tc ) { struct test_node map[] = { { T_STR, 0, "canonical" }, { T_STR, 0, "2001-12-15T02:59:43.1Z" }, { T_STR, 0, "iso8601" }, { T_STR, 0, "2001-12-14t21:59:43.10-05:00" }, { T_STR, 0, "spaced" }, { T_STR, 0, "2001-12-14 21:59:43.10 -05:00" }, { T_STR, 0, "date" }, { T_STR, 0, "2002-12-14" }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; CuStreamCompare( tc, /* YAML document */ "canonical: 2001-12-15T02:59:43.1Z\n" "iso8601: 2001-12-14t21:59:43.10-05:00\n" "spaced: 2001-12-14 21:59:43.10 -05:00\n" "date: 2002-12-14 # Time is noon UTC\n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example legacy D4: legacy Timestamps test */ void YtsSpecificationExamples_23( CuTest *tc ) { struct test_node map[] = { { T_STR, 0, "canonical" }, { T_STR, 0, "2001-12-15T02:59:43.00Z" }, { T_STR, 0, "iso8601" }, { T_STR, 0, "2001-02-28t21:59:43.00-05:00" }, { T_STR, 0, "spaced" }, { T_STR, 0, "2001-12-14 21:59:43.00 -05:00" }, { T_STR, 0, "date" }, { T_STR, 0, "2002-12-14" }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; 
CuStreamCompare( tc, /* YAML document */ "canonical: 2001-12-15T02:59:43.00Z\n" "iso8601: 2001-02-28t21:59:43.00-05:00\n" "spaced: 2001-12-14 21:59:43.00 -05:00\n" "date: 2002-12-14\n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example 2.23: Various explicit families */ void YtsSpecificationExamples_24( CuTest *tc ) { struct test_node map[] = { { T_STR, 0, "not-date" }, { T_STR, "tag:yaml.org,2002:str", "2002-04-28" }, { T_STR, 0, "picture" }, { T_STR, "tag:yaml.org,2002:binary", "R0lGODlhDAAMAIQAAP//9/X\n17unp5WZmZgAAAOfn515eXv\nPz7Y6OjuDg4J+fn5OTk6enp\n56enmleECcgggoBADs=\n" }, { T_STR, 0, "application specific tag" }, { T_STR, "x-private:something", "The semantics of the tag\nabove may be different for\ndifferent documents.\n" }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; CuStreamCompare( tc, /* YAML document */ "not-date: !str 2002-04-28\n" "picture: !binary |\n" " R0lGODlhDAAMAIQAAP//9/X\n" " 17unp5WZmZgAAAOfn515eXv\n" " Pz7Y6OjuDg4J+fn5OTk6enp\n" " 56enmleECcgggoBADs=\n" "\n" "application specific tag: !!something |\n" " The semantics of the tag\n" " above may be different for\n" " different documents.\n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example 2.24: Application specific family */ void YtsSpecificationExamples_25( CuTest *tc ) { struct test_node point1[] = { { T_STR, 0, "x" }, { T_STR, 0, "73" }, { T_STR, 0, "y" }, { T_STR, 0, "129" }, end_node }; struct test_node point2[] = { { T_STR, 0, "x" }, { T_STR, 0, "89" }, { T_STR, 0, "y" }, { T_STR, 0, "102" }, end_node }; struct test_node map1[] = { { T_STR, 0, "center" }, { T_MAP, 0, 0, point1 }, { T_STR, 0, "radius" }, { T_STR, 0, "7" }, end_node }; struct test_node map2[] = { { T_STR, 0, "start" }, { T_MAP, 0, 0, point1 }, { T_STR, 0, "finish" }, { T_MAP, 0, 0, point2 }, end_node }; struct test_node map3[] = { { T_STR, 0, "start" }, { T_MAP, 0, 0, point1 }, { T_STR, 0, "color" }, { T_STR, 0, "0xFFEEBB" }, { T_STR, 0, "value" }, { T_STR, 0, "Pretty vector drawing." 
}, end_node }; struct test_node seq[] = { { T_MAP, "tag:clarkevans.com,2002:graph/circle", 0, map1 }, { T_MAP, "tag:clarkevans.com,2002:graph/line", 0, map2 }, { T_MAP, "tag:clarkevans.com,2002:graph/label", 0, map3 }, end_node }; struct test_node stream[] = { { T_SEQ, "tag:clarkevans.com,2002:graph/shape", 0, seq }, end_node }; CuStreamCompare( tc, /* YAML document */ "# Establish a tag prefix\n" "--- !clarkevans.com,2002/graph/^shape\n" " # Use the prefix: shorthand for\n" " # !clarkevans.com,2002/graph/circle\n" "- !^circle\n" " center: &ORIGIN {x: 73, 'y': 129}\n" " radius: 7\n" "- !^line # !clarkevans.com,2002/graph/line\n" " start: *ORIGIN\n" " finish: { x: 89, 'y': 102 }\n" "- !^label\n" " start: *ORIGIN\n" " color: 0xFFEEBB\n" " value: Pretty vector drawing.\n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example 2.26: Ordered mappings */ void YtsSpecificationExamples_26( CuTest *tc ) { struct test_node map1[] = { { T_STR, 0, "Mark McGwire" }, { T_STR, 0, "65" }, end_node }; struct test_node map2[] = { { T_STR, 0, "Sammy Sosa" }, { T_STR, 0, "63" }, end_node }; struct test_node map3[] = { { T_STR, 0, "Ken Griffy" }, { T_STR, 0, "58" }, end_node }; struct test_node seq[] = { { T_MAP, 0, 0, map1 }, { T_MAP, 0, 0, map2 }, { T_MAP, 0, 0, map3 }, end_node }; struct test_node stream[] = { { T_SEQ, "tag:yaml.org,2002:omap", 0, seq }, end_node }; CuStreamCompare( tc, /* YAML document */ "# ordered maps are represented as\n" "# a sequence of mappings, with\n" "# each mapping having one key\n" "--- !omap\n" "- Mark McGwire: 65\n" "- Sammy Sosa: 63\n" "- Ken Griffy: 58\n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example 2.27: Invoice */ void YtsSpecificationExamples_27( CuTest *tc ) { struct test_node prod1[] = { { T_STR, 0, "sku" }, { T_STR, 0, "BL394D" }, { T_STR, 0, "quantity" }, { T_STR, 0, "4" }, { T_STR, 0, "description" }, { T_STR, 0, "Basketball" }, { T_STR, 0, "price" }, { T_STR, 0, "450.00" }, end_node }; struct test_node prod2[] = { { T_STR, 0, "sku" }, { T_STR, 0, "BL4438H" }, { T_STR, 0, "quantity" }, { T_STR, 0, "1" }, { T_STR, 0, "description" }, { T_STR, 0, "Super Hoop" }, { T_STR, 0, "price" }, { T_STR, 0, "2392.00" }, end_node }; struct test_node products[] = { { T_MAP, 0, 0, prod1 }, { T_MAP, 0, 0, prod2 }, end_node }; struct test_node address[] = { { T_STR, 0, "lines" }, { T_STR, 0, "458 Walkman Dr.\nSuite #292\n" }, { T_STR, 0, "city" }, { T_STR, 0, "Royal Oak" }, { T_STR, 0, "state" }, { T_STR, 0, "MI" }, { T_STR, 0, "postal" }, { T_STR, 0, "48046" }, end_node }; struct test_node id001[] = { { T_STR, 0, "given" }, { T_STR, 0, "Chris" }, { T_STR, 0, "family" }, { T_STR, 0, "Dumars" }, { T_STR, 0, "address" }, { T_MAP, 0, 0, address }, end_node }; struct test_node map[] = { { T_STR, 0, "invoice" }, { T_STR, 0, "34843" }, { T_STR, 0, "date" }, { T_STR, 0, "2001-01-23" }, { T_STR, 0, "bill-to" }, { T_MAP, 0, 0, id001 }, { T_STR, 0, "ship-to" }, { T_MAP, 0, 0, id001 }, { T_STR, 0, "product" }, { T_SEQ, 0, 0, products }, { T_STR, 0, "tax" }, { T_STR, 0, "251.42" }, { T_STR, 0, "total" }, { T_STR, 0, "4443.52" }, { T_STR, 0, "comments" }, { T_STR, 0, "Late afternoon is best. 
Backup contact is Nancy Billsmer @ 338-4338.\n" }, end_node }; struct test_node stream[] = { { T_MAP, "tag:clarkevans.com,2002:invoice", 0, map }, end_node }; CuStreamCompare( tc, /* YAML document */ "--- !clarkevans.com,2002/^invoice\n" "invoice: 34843\n" "date : 2001-01-23\n" "bill-to: &id001\n" " given : Chris\n" " family : Dumars\n" " address:\n" " lines: |\n" " 458 Walkman Dr.\n" " Suite #292\n" " city : Royal Oak\n" " state : MI\n" " postal : 48046\n" "ship-to: *id001\n" "product:\n" " - sku : BL394D\n" " quantity : 4\n" " description : Basketball\n" " price : 450.00\n" " - sku : BL4438H\n" " quantity : 1\n" " description : Super Hoop\n" " price : 2392.00\n" "tax : 251.42\n" "total: 4443.52\n" "comments: >\n" " Late afternoon is best.\n" " Backup contact is Nancy\n" " Billsmer @ 338-4338.\n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example 2.28: Log file */ void YtsSpecificationExamples_28( CuTest *tc ) { struct test_node map1[] = { { T_STR, 0, "Time" }, { T_STR, 0, "2001-11-23 15:01:42 -05:00" }, { T_STR, 0, "User" }, { T_STR, 0, "ed" }, { T_STR, 0, "Warning" }, { T_STR, 0, "This is an error message for the log file\n" }, end_node }; struct test_node map2[] = { { T_STR, 0, "Time" }, { T_STR, 0, "2001-11-23 15:02:31 -05:00" }, { T_STR, 0, "User" }, { T_STR, 0, "ed" }, { T_STR, 0, "Warning" }, { T_STR, 0, "A slightly different error message.\n" }, end_node }; struct test_node file1[] = { { T_STR, 0, "file" }, { T_STR, 0, "TopClass.py" }, { T_STR, 0, "line" }, { T_STR, 0, "23" }, { T_STR, 0, "code" }, { T_STR, 0, "x = MoreObject(\"345\\n\")\n" }, end_node }; struct test_node file2[] = { { T_STR, 0, "file" }, { T_STR, 0, "MoreClass.py" }, { T_STR, 0, "line" }, { T_STR, 0, "58" }, { T_STR, 0, "code" }, { T_STR, 0, "foo = bar" }, end_node }; struct test_node stack[] = { { T_MAP, 0, 0, file1 }, { T_MAP, 0, 0, file2 }, end_node }; struct test_node map3[] = { { T_STR, 0, "Date" }, { T_STR, 0, "2001-11-23 15:03:17 -05:00" }, { T_STR, 0, "User" }, { T_STR, 0, "ed" }, { T_STR, 0, "Fatal" }, { T_STR, 0, "Unknown variable \"bar\"\n" }, { T_STR, 0, "Stack" }, { T_SEQ, 0, 0, stack }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map1 }, { T_MAP, 0, 0, map2 }, { T_MAP, 0, 0, map3 }, end_node }; CuStreamCompare( tc, /* YAML document */ "---\n" "Time: 2001-11-23 15:01:42 -05:00\n" "User: ed\n" "Warning: >\n" " This is an error message\n" " for the log file\n" "---\n" "Time: 2001-11-23 15:02:31 -05:00\n" "User: ed\n" "Warning: >\n" " A slightly different error\n" " message.\n" "---\n" "Date: 2001-11-23 15:03:17 -05:00\n" "User: ed\n" "Fatal: >\n" " Unknown variable \"bar\"\n" "Stack:\n" " - file: TopClass.py\n" " line: 23\n" " code: |\n" " x = MoreObject(\"345\\n\")\n" " - file: MoreClass.py\n" " line: 58\n" " code: |-\n" " foo = bar\n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example : Throwaway comments */ void YtsSpecificationExamples_29( CuTest *tc ) { struct test_node map[] = { { T_STR, 0, "this" }, { T_STR, 0, "contains three lines of text.\nThe third one starts with a\n# character. This isn't a comment.\n" }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; CuStreamCompare( tc, /* YAML document */ "### These are four throwaway comment ### \n" "\n" "### lines (the second line is empty). ### \n" "this: | # Comments may trail lines.\n" " contains three lines of text.\n" " The third one starts with a\n" " # character. 
This isn't a comment.\n" "\n" "# These are three throwaway comment\n" "# lines (the first line is empty).\n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example : Document with a single value */ void YtsSpecificationExamples_30( CuTest *tc ) { struct test_node stream[] = { { T_STR, 0, "This YAML stream contains a single text value. The next stream is a log file - a sequence of log entries. Adding an entry to the log is a simple matter of appending it at the end.\n" }, end_node }; CuStreamCompare( tc, /* YAML document */ "--- > \n" "This YAML stream contains a single text value.\n" "The next stream is a log file - a sequence of\n" "log entries. Adding an entry to the log is a\n" "simple matter of appending it at the end.\n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example : Document stream */ void YtsSpecificationExamples_31( CuTest *tc ) { struct test_node map1[] = { { T_STR, 0, "at" }, { T_STR, 0, "2001-08-12 09:25:00.00 Z" }, { T_STR, 0, "type" }, { T_STR, 0, "GET" }, { T_STR, 0, "HTTP" }, { T_STR, 0, "1.0" }, { T_STR, 0, "url" }, { T_STR, 0, "/index.html" }, end_node }; struct test_node map2[] = { { T_STR, 0, "at" }, { T_STR, 0, "2001-08-12 09:25:10.00 Z" }, { T_STR, 0, "type" }, { T_STR, 0, "GET" }, { T_STR, 0, "HTTP" }, { T_STR, 0, "1.0" }, { T_STR, 0, "url" }, { T_STR, 0, "/toc.html" }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map1 }, { T_MAP, 0, 0, map2 }, end_node }; CuStreamCompare( tc, /* YAML document */ "--- \n" "at: 2001-08-12 09:25:00.00 Z \n" "type: GET \n" "HTTP: '1.0' \n" "url: '/index.html' \n" "--- \n" "at: 2001-08-12 09:25:10.00 Z \n" "type: GET \n" "HTTP: '1.0' \n" "url: '/toc.html' \n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example : Top level mapping */ void YtsSpecificationExamples_32( CuTest *tc ) { struct test_node map[] = { { T_STR, 0, "invoice" }, { T_STR, 0, "34843" }, { T_STR, 0, "date" }, { T_STR, 0, "2001-01-23" }, { T_STR, 0, "total" }, { T_STR, 0, "4443.52" }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; CuStreamCompare( tc, /* YAML document */ "# This stream is an example of a top-level mapping. \n" "invoice : 34843 \n" "date : 2001-01-23 \n" "total : 4443.52 \n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example : Single-line documents */ void YtsSpecificationExamples_33( CuTest *tc ) { struct test_node map[] = { end_node }; struct test_node seq[] = { end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, { T_SEQ, 0, 0, seq }, { T_STR, 0, "" }, end_node }; CuStreamCompare( tc, /* YAML document */ "# The following is a sequence of three documents. \n" "# The first contains an empty mapping, the second \n" "# an empty sequence, and the last an empty string. \n" "--- {} \n" "--- [ ] \n" "--- '' \n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example : Document with pause */ void YtsSpecificationExamples_34( CuTest *tc ) { struct test_node map1[] = { { T_STR, 0, "sent at" }, { T_STR, 0, "2002-06-06 11:46:25.10 Z" }, { T_STR, 0, "payload" }, { T_STR, 0, "Whatever" }, end_node }; struct test_node map2[] = { { T_STR, 0, "sent at" }, { T_STR, 0, "2002-06-06 12:05:53.47 Z" }, { T_STR, 0, "payload" }, { T_STR, 0, "Whatever" }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map1 }, { T_MAP, 0, 0, map2 }, end_node }; CuStreamCompare( tc, /* YAML document */ "# A communication channel based on a YAML stream. 
\n" "--- \n" "sent at: 2002-06-06 11:46:25.10 Z \n" "payload: Whatever \n" "# Receiver can process this as soon as the following is sent: \n" "... \n" "# Even if the next message is sent long after: \n" "--- \n" "sent at: 2002-06-06 12:05:53.47 Z \n" "payload: Whatever \n" "... \n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example : Explicit typing */ void YtsSpecificationExamples_35( CuTest *tc ) { struct test_node map[] = { { T_STR, 0, "integer" }, { T_STR, "tag:yaml.org,2002:int", "12" }, { T_STR, 0, "also int" }, { T_STR, "tag:yaml.org,2002:int", "12" }, { T_STR, 0, "string" }, { T_STR, "tag:yaml.org,2002:str", "12" }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; CuStreamCompare( tc, /* YAML document */ "integer: 12 \n" "also int: ! \"12\" \n" "string: !str 12 \n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example : Private types */ void YtsSpecificationExamples_36( CuTest *tc ) { struct test_node pool[] = { { T_STR, 0, "number" }, { T_STR, 0, "8" }, { T_STR, 0, "color" }, { T_STR, 0, "black" }, end_node }; struct test_node map1[] = { { T_STR, 0, "pool" }, { T_MAP, "x-private:ball", 0, pool }, end_node }; struct test_node bearing[] = { { T_STR, 0, "material" }, { T_STR, 0, "steel" }, end_node }; struct test_node map2[] = { { T_STR, 0, "bearing" }, { T_MAP, "x-private:ball", 0, bearing }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map1 }, { T_MAP, 0, 0, map2 }, end_node }; CuStreamCompare( tc, /* YAML document */ "# Both examples below make use of the 'x-private:ball' \n" "# type family URI, but with different semantics. \n" "--- \n" "pool: !!ball \n" " number: 8 \n" " color: black \n" "--- \n" "bearing: !!ball \n" " material: steel \n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example : Type family under yaml.org */ void YtsSpecificationExamples_37( CuTest *tc ) { struct test_node seq[] = { { T_STR, "tag:yaml.org,2002:str", "a Unicode string" }, end_node }; struct test_node stream[] = { { T_SEQ, 0, 0, seq }, end_node }; CuStreamCompare( tc, /* YAML document */ "# The URI is 'tag:yaml.org,2002:str' \n" "- !str a Unicode string \n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example : Type family under perl.yaml.org */ void YtsSpecificationExamples_38( CuTest *tc ) { struct test_node map[] = { end_node }; struct test_node seq[] = { { T_MAP, "tag:perl.yaml.org,2002:Text::Tabs", 0, map }, end_node }; struct test_node stream[] = { { T_SEQ, 0, 0, seq }, end_node }; CuStreamCompare( tc, /* YAML document */ "# The URI is 'tag:perl.yaml.org,2002:Text::Tabs' \n" "- !perl/Text::Tabs {} \n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example : Type family under clarkevans.com */ void YtsSpecificationExamples_39( CuTest *tc ) { struct test_node map[] = { end_node }; struct test_node seq[] = { { T_MAP, "tag:clarkevans.com,2003-02:timesheet", 0, map }, end_node }; struct test_node stream[] = { { T_SEQ, 0, 0, seq }, end_node }; CuStreamCompare( tc, /* YAML document */ "# The URI is 'tag:clarkevans.com,2003-02:timesheet' \n" "- !clarkevans.com,2003-02/timesheet {}\n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example : URI Escaping */ void YtsSpecificationExamples_40( CuTest *tc ) { struct test_node same[] = { { T_STR, "tag:domain.tld,2002:type0", "value" }, { T_STR, "tag:domain.tld,2002:type0", "value" }, end_node }; struct test_node diff[] = { 
{ T_STR, "tag:domain.tld,2002:type%30", "value" }, { T_STR, "tag:domain.tld,2002:type0", "value" }, end_node }; struct test_node map[] = { { T_STR, 0, "same" }, { T_SEQ, 0, 0, same }, { T_STR, 0, "different" }, { T_SEQ, 0, 0, diff }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; CuStreamCompare( tc, /* YAML document */ "same: \n" " - !domain.tld,2002/type\\x30 value\n" " - !domain.tld,2002/type0 value\n" "different: # As far as the YAML parser is concerned \n" " - !domain.tld,2002/type%30 value\n" " - !domain.tld,2002/type0 value\n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example : Overriding anchors */ void YtsSpecificationExamples_42( CuTest *tc ) { struct test_node map[] = { { T_STR, 0, "anchor" }, { T_STR, 0, "This scalar has an anchor." }, { T_STR, 0, "override" }, { T_STR, 0, "The alias node below is a repeated use of this value.\n" }, { T_STR, 0, "alias" }, { T_STR, 0, "The alias node below is a repeated use of this value.\n" }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; CuStreamCompare( tc, /* YAML document */ "anchor : &A001 This scalar has an anchor. \n" "override : &A001 >\n" " The alias node below is a\n" " repeated use of this value.\n" "alias : *A001\n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example : Flow and block formatting */ void YtsSpecificationExamples_43( CuTest *tc ) { struct test_node empty[] = { end_node }; struct test_node flow[] = { { T_STR, 0, "one" }, { T_STR, 0, "two" }, { T_STR, 0, "three" }, { T_STR, 0, "four" }, { T_STR, 0, "five" }, end_node }; struct test_node inblock[] = { { T_STR, 0, "Subordinate sequence entry" }, end_node }; struct test_node block[] = { { T_STR, 0, "First item in top sequence" }, { T_SEQ, 0, 0, inblock }, { T_STR, 0, "A folded sequence entry\n" }, { T_STR, 0, "Sixth item in top sequence" }, end_node }; struct test_node map[] = { { T_STR, 0, "empty" }, { T_SEQ, 0, 0, empty }, { T_STR, 0, "flow" }, { T_SEQ, 0, 0, flow }, { T_STR, 0, "block" }, { T_SEQ, 0, 0, block }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; CuStreamCompare( tc, /* YAML document */ "empty: [] \n" "flow: [ one, two, three # May span lines, \n" " , four, # indentation is \n" " five ] # mostly ignored. 
\n" "block: \n" " - First item in top sequence \n" " - \n" " - Subordinate sequence entry \n" " - > \n" " A folded sequence entry\n" " - Sixth item in top sequence \n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } /* * Example : Timestamp */ void YtsSpecificationExamples_62( CuTest *tc ) { struct test_node map[] = { { T_STR, 0, "canonical" }, { T_STR, 0, "2001-12-15T02:59:43.1Z" }, { T_STR, 0, "valid iso8601" }, { T_STR, 0, "2001-12-14t21:59:43.10-05:00" }, { T_STR, 0, "space separated" }, { T_STR, 0, "2001-12-14 21:59:43.10 -05:00" }, { T_STR, 0, "date (noon UTC)" }, { T_STR, 0, "2002-12-14" }, end_node }; struct test_node stream[] = { { T_MAP, 0, 0, map }, end_node }; CuStreamCompare( tc, /* YAML document */ "canonical: 2001-12-15T02:59:43.1Z \n" "valid iso8601: 2001-12-14t21:59:43.10-05:00 \n" "space separated: 2001-12-14 21:59:43.10 -05:00 \n" "date (noon UTC): 2002-12-14 \n" , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } CuSuite * SyckGetSuite() { CuSuite *suite = CuSuiteNew(); SUITE_ADD_TEST( suite, YtsSpecificationExamples_0 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_1 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_2 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_3 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_4 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_5 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_6 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_7 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_8 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_9 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_10 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_11 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_12 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_13 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_14 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_15 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_16 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_18 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_19 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_20 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_21 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_22 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_23 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_24 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_25 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_26 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_27 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_28 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_29 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_30 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_31 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_32 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_33 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_34 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_35 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_36 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_37 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_38 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_39 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_40 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_42 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_43 ); SUITE_ADD_TEST( suite, YtsSpecificationExamples_62 ); return suite; } int main(void) { CuString *output = CuStringNew(); CuSuite* suite = SyckGetSuite(); int count; CuSuiteRun(suite); CuSuiteSummary(suite, output); CuSuiteDetails(suite, 
output); printf("%s\n", output->buffer); count = suite->failCount; CuStringFree( output ); CuSuiteFree( suite ); return count; } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/tests/CuTest.c0000644000000000000000000001516511672453175022607 0ustar rootroot#include #include #include #include #include #include "CuTest.h" /*-------------------------------------------------------------------------* * CuStr *-------------------------------------------------------------------------*/ char* CuStrAlloc(int size) { char* new = (char*) malloc( sizeof(char) * (size) ); return new; } char* CuStrCopy(char* old) { int len = strlen(old); char* new = CuStrAlloc(len + 1); strcpy(new, old); return new; } /*-------------------------------------------------------------------------* * CuString *-------------------------------------------------------------------------*/ void CuStringInit(CuString* str) { str->length = 0; str->size = STRING_MAX; str->buffer = (char*) malloc(sizeof(char) * str->size); str->buffer[0] = '\0'; } CuString* CuStringNew(void) { CuString* str = (CuString*) malloc(sizeof(CuString)); str->length = 0; str->size = STRING_MAX; str->buffer = (char*) malloc(sizeof(char) * str->size); str->buffer[0] = '\0'; return str; } void CuStringResize(CuString* str, int newSize) { str->buffer = (char*) realloc(str->buffer, sizeof(char) * newSize); str->size = newSize; } void CuStringAppend(CuString* str, char* text) { int length = strlen(text); CuStringAppendLen(str, text, length); } void CuStringAppendLen(CuString* str, char* text, long length) { if (str->length + length + 1 >= str->size) CuStringResize(str, str->length + length + 1 + STRING_INC); str->length += length; strcat(str->buffer, text); } void CuStringAppendChar(CuString* str, char ch) { char text[2]; text[0] = ch; text[1] = '\0'; CuStringAppend(str, text); } void CuStringAppendFormat(CuString* str, char* format, ...) 
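/* printf-style append: formats into a fixed HUGE_STRING_LEN stack buffer with
   vsprintf() and then appends the formatted result to the CuString. */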
{ va_list argp; char buf[HUGE_STRING_LEN]; va_start(argp, format); vsprintf(buf, format, argp); va_end(argp); CuStringAppend(str, buf); } void CuStringFree(CuString* str) { if ( str != NULL ) { free( str->buffer ); free( str ); } } /*-------------------------------------------------------------------------* * CuTest *-------------------------------------------------------------------------*/ void CuTestInit(CuTest* t, char* name, TestFunction function) { t->name = CuStrCopy(name); t->failed = 0; t->ran = 0; t->message = NULL; t->function = function; t->jumpBuf = NULL; } CuTest* CuTestNew(char* name, TestFunction function) { CuTest* tc = CU_ALLOC(CuTest); CuTestInit(tc, name, function); return tc; } void CuTestFree(CuTest* t) { if ( t != NULL ) { free( t->name ); free( t ); } } void CuFail(CuTest* tc, char* message) { tc->failed = 1; tc->message = CuStrCopy(message); if (tc->jumpBuf != 0) longjmp(*(tc->jumpBuf), 0); } void CuAssert(CuTest* tc, char* message, int condition) { if (condition) return; CuFail(tc, message); } void CuAssertTrue(CuTest* tc, int condition) { if (condition) return; CuFail(tc, "assert failed"); } void CuAssertStrEquals(CuTest* tc, char* expected, char* actual) { CuString* message; if (strcmp(expected, actual) == 0) return; message = CuStringNew(); CuStringAppend(message, "expected <"); CuStringAppend(message, expected); CuStringAppend(message, "> but was <"); CuStringAppend(message, actual); CuStringAppend(message, ">"); CuFail(tc, message->buffer); } void CuAssertIntEquals(CuTest* tc, int expected, int actual) { char buf[STRING_MAX]; if (expected == actual) return; sprintf(buf, "expected <%d> but was <%d>", expected, actual); CuFail(tc, buf); } void CuAssertPtrEquals(CuTest* tc, void* expected, void* actual) { char buf[STRING_MAX]; if (expected == actual) return; sprintf(buf, "expected pointer <0x%p> but was <0x%p>", expected, actual); CuFail(tc, buf); } void CuAssertPtrNotNull(CuTest* tc, void* pointer) { char buf[STRING_MAX]; if (pointer != NULL ) return; sprintf(buf, "null pointer unexpected"); CuFail(tc, buf); } void CuTestRun(CuTest* tc) { jmp_buf buf; tc->jumpBuf = &buf; if (setjmp(buf) == 0) { tc->ran = 1; (tc->function)(tc); } tc->jumpBuf = 0; } /*-------------------------------------------------------------------------* * CuSuite *-------------------------------------------------------------------------*/ void CuSuiteInit(CuSuite* testSuite) { testSuite->count = 0; testSuite->failCount = 0; } CuSuite* CuSuiteNew() { CuSuite* testSuite = CU_ALLOC(CuSuite); CuSuiteInit(testSuite); return testSuite; } void CuSuiteFree(CuSuite* testSuite) { int i; for (i = 0 ; i < testSuite->count ; ++i) { CuTestFree( testSuite->list[i] ); } free( testSuite ); } void CuSuiteAdd(CuSuite* testSuite, CuTest *testCase) { assert(testSuite->count < MAX_TEST_CASES); testSuite->list[testSuite->count] = testCase; testSuite->count++; } void CuSuiteAddSuite(CuSuite* testSuite, CuSuite* testSuite2) { int i; for (i = 0 ; i < testSuite2->count ; ++i) { CuTest* testCase = testSuite2->list[i]; CuSuiteAdd(testSuite, testCase); } } void CuSuiteRun(CuSuite* testSuite) { int i; for (i = 0 ; i < testSuite->count ; ++i) { CuTest* testCase = testSuite->list[i]; CuTestRun(testCase); if (testCase->failed) { testSuite->failCount += 1; } } } void CuSuiteSummary(CuSuite* testSuite, CuString* summary) { int i; for (i = 0 ; i < testSuite->count ; ++i) { CuTest* testCase = testSuite->list[i]; CuStringAppend(summary, testCase->failed ? 
"F" : "."); } CuStringAppend(summary, "\n\n"); } void CuSuiteDetails(CuSuite* testSuite, CuString* details) { int i; int failCount = 0; if (testSuite->failCount == 0) { int passCount = testSuite->count - testSuite->failCount; char* testWord = passCount == 1 ? "test" : "tests"; CuStringAppendFormat(details, "OK (%d %s)\n", passCount, testWord); } else { if (testSuite->failCount == 1) CuStringAppend(details, "There was 1 failure:\n"); else CuStringAppendFormat(details, "There were %d failures:\n", testSuite->failCount); for (i = 0 ; i < testSuite->count ; ++i) { CuTest* testCase = testSuite->list[i]; if (testCase->failed) { failCount++; CuStringAppendFormat(details, "%d) %s: %s\n", failCount, testCase->name, testCase->message); } } CuStringAppend(details, "\n!!!FAILURES!!!\n"); CuStringAppendFormat(details, "Runs: %d ", testSuite->count); CuStringAppendFormat(details, "Passes: %d ", testSuite->count - testSuite->failCount); CuStringAppendFormat(details, "Fails: %d\n", testSuite->failCount); } } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/tests/Makefile.am0000644000000000000000000000070511672453175023262 0ustar rootrootINCLUDES = -I$(top_srcdir)/lib LDFLAGS = -L$(top_srcdir)/lib TESTS = test-basic test-parse test-yts test-emit noinst_PROGRAMS = test-basic test-parse test-yts test-emit test_basic_SOURCES = Basic.c CuTest.c CuTest.h test_basic_LDADD = -lsyck test_parse_SOURCES = Parse.c CuTest.c CuTest.h test_parse_LDADD = -lsyck test_yts_SOURCES = YTS.c CuTest.c CuTest.h test_yts_LDADD = -lsyck test_emit_SOURCES = Emit.c CuTest.c CuTest.h test_emit_LDADD = -lsyck ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/tests/Basic.c0000644000000000000000000001020111672453175022403 0ustar rootroot// // Basic.c // // $Author: why $ // $Date: 2004/05/15 02:31:52 $ // // Copyright (C) 2003 why the lucky stiff // #include "syck.h" #include "CuTest.h" // // Test allocating a single node of kind 'str'. 
// void TestSyckNodeAlloc( CuTest *tc ) { SyckNode* n; n = syck_new_str( "YAML", scalar_plain ); CuAssert( tc, "Allocated 'str' node reporting as 'seq'.", n->kind != syck_seq_kind ); CuAssert( tc, "Allocated 'str' node reporting as 'map'.", n->kind != syck_map_kind ); CuAssert( tc, "Allocated 'str' not reporting as 'str'.", n->kind == syck_str_kind ); CuAssertStrEquals( tc, "YAML", syck_str_read( n ) ); syck_free_node( n ); } // // Test building a simple sequence // void TestSyckSeqAlloc( CuTest *tc ) { SyckNode *n; SYMID id; n = syck_new_seq( 1 ); for ( id = 11001; id < 23000; id += 24 ) { syck_seq_add( n, id ); } CuAssert( tc, "Invalid value at '0'", 1 == syck_seq_read( n, 0 ) ); CuAssert( tc, "Invalid value at '1'", 11001 == syck_seq_read( n, 1 ) ); CuAssert( tc, "Invalid value at '200'", 15801 == syck_seq_read( n, 201 ) ); syck_free_node( n ); } // // Test building a simple map // void TestSyckMapAlloc( CuTest *tc ) { SyckNode *n; n = syck_new_map( 24556, 24557 ); syck_map_add( n, 24558, 24559 ); syck_map_add( n, 24658, 24659 ); syck_map_add( n, 24758, 24759 ); syck_map_add( n, 24858, 24859 ); syck_map_add( n, 24958, 24959 ); syck_map_add( n, 24058, 24059 ); syck_map_add( n, 24158, 24159 ); CuAssert( tc, "Invalid key at '0'.", 24556 == syck_map_read( n, map_key, 0 ) ); CuAssert( tc, "Invalid key at '1'.", 24558 == syck_map_read( n, map_key, 1 ) ); CuAssert( tc, "Invalid key at '2'.", 24658 == syck_map_read( n, map_key, 2 ) ); CuAssert( tc, "Invalid key at '3'.", 24758 == syck_map_read( n, map_key, 3 ) ); CuAssert( tc, "Invalid key at '4'.", 24858 == syck_map_read( n, map_key, 4 ) ); CuAssert( tc, "Invalid key at '5'.", 24958 == syck_map_read( n, map_key, 5 ) ); CuAssert( tc, "Invalid key at '6'.", 24058 == syck_map_read( n, map_key, 6 ) ); CuAssert( tc, "Invalid key at '7'.", 24158 == syck_map_read( n, map_key, 7 ) ); CuAssert( tc, "Invalid value at '0'", 24557 == syck_map_read( n, map_value, 0 ) ); CuAssert( tc, "Invalid value at '1'", 24559 == syck_map_read( n, map_value, 1 ) ); CuAssert( tc, "Invalid value at '2'", 24659 == syck_map_read( n, map_value, 2 ) ); CuAssert( tc, "Invalid value at '3'", 24759 == syck_map_read( n, map_value, 3 ) ); CuAssert( tc, "Invalid value at '4'", 24859 == syck_map_read( n, map_value, 4 ) ); CuAssert( tc, "Invalid value at '5'", 24959 == syck_map_read( n, map_value, 5 ) ); CuAssert( tc, "Invalid value at '6'", 24059 == syck_map_read( n, map_value, 6 ) ); CuAssert( tc, "Invalid value at '7'", 24159 == syck_map_read( n, map_value, 7 ) ); syck_free_node( n ); } // // Test building a simple map // void TestSyckMapUpdate( CuTest *tc ) { SyckNode *n1, *n2; n1 = syck_new_map( 51116, 51117 ); syck_map_add( n1, 51118, 51119 ); n2 = syck_new_map( 51126, 51127 ); syck_map_add( n2, 51128, 51129 ); syck_map_update( n1, n2 ); CuAssert( tc, "Invalid key at '2'", 51126 == syck_map_read( n1, map_key, 2 ) ); CuAssert( tc, "Invalid key at '3'", 51128 == syck_map_read( n1, map_key, 3 ) ); CuAssert( tc, "Invalid value at '2'", 51127 == syck_map_read( n1, map_value, 2 ) ); CuAssert( tc, "Invalid value at '3'", 51129 == syck_map_read( n1, map_value, 3 ) ); syck_free_node( n2 ); syck_free_node( n1 ); } CuSuite * SyckGetSuite() { CuSuite *suite = CuSuiteNew(); SUITE_ADD_TEST( suite, TestSyckNodeAlloc ); SUITE_ADD_TEST( suite, TestSyckSeqAlloc ); SUITE_ADD_TEST( suite, TestSyckMapAlloc ); SUITE_ADD_TEST( suite, TestSyckMapUpdate ); return suite; } int main(void) { CuString *output = CuStringNew(); CuSuite* suite = SyckGetSuite(); int count; CuSuiteRun(suite); 
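    /* Summarize and detail the results into the output string; the suite's
       failure count becomes the process exit status. */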
CuSuiteSummary(suite, output); CuSuiteDetails(suite, output); printf("%s\n", output->buffer); count = suite->failCount; CuStringFree( output ); CuSuiteFree( suite ); return count; } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/tests/YTS.c.erb0000644000000000000000000002024411672453175022620 0ustar rootroot// // YTS.c // // $Author: why $ // $Date: 2005/04/13 06:27:54 $ // // Copyright (C) 2004 why the lucky stiff // // Well, this is the Yaml Testing Suite in the form of a plain C // API. Basically, this is as good as C integration gets for Syck. // You've got to have a symbol table around. From there, you can // query your data. // #include #include "syck.h" #include "CuTest.h" /* YAML test node structures */ #define T_STR 10 #define T_SEQ 20 #define T_MAP 30 #define T_END 40 #define ILEN 2 struct test_node { int type; char *tag; char *key; struct test_node *value; }; struct test_node end_node = { T_END }; /* * Assertion which compares a YAML document with an * equivalent set of test_node structs. */ SYMID syck_copy_handler(p, n) SyckParser *p; SyckNode *n; { int i = 0; struct test_node *tn = S_ALLOC_N( struct test_node, 1 ); switch ( n->kind ) { case syck_str_kind: tn->type = T_STR; tn->key = syck_strndup( n->data.str->ptr, n->data.str->len ); tn->value = 0; break; case syck_seq_kind: { struct test_node *val; struct test_node *seq = S_ALLOC_N( struct test_node, n->data.list->idx + 1 ); tn->type = T_SEQ; tn->key = 0; for ( i = 0; i < n->data.list->idx; i++ ) { SYMID oid = syck_seq_read( n, i ); syck_lookup_sym( p, oid, (char **)&val ); seq[i] = val[0]; } seq[n->data.list->idx] = end_node; tn->value = seq; } break; case syck_map_kind: { struct test_node *val; struct test_node *map = S_ALLOC_N( struct test_node, ( n->data.pairs->idx * 2 ) + 1 ); tn->type = T_MAP; tn->key = 0; for ( i = 0; i < n->data.pairs->idx; i++ ) { SYMID oid = syck_map_read( n, map_key, i ); syck_lookup_sym( p, oid, (char **)&val ); map[i * 2] = val[0]; oid = syck_map_read( n, map_value, i ); syck_lookup_sym( p, oid, (char **)&val ); map[(i * 2) + 1] = val[0]; } map[n->data.pairs->idx * 2] = end_node; tn->value = map; } break; } tn->tag = 0; if ( n->type_id != NULL ) { tn->tag = syck_strndup( n->type_id, strlen( n->type_id ) ); } return syck_add_sym( p, (char *) tn ); } int syck_free_copies( char *key, struct test_node *tn, char *arg ) { if ( tn != NULL ) { switch ( tn->type ) { case T_STR: S_FREE( tn->key ); break; case T_SEQ: case T_MAP: S_FREE( tn->value ); break; } if ( tn->tag != NULL ) S_FREE( tn->tag ); S_FREE( tn ); } tn = NULL; return ST_CONTINUE; } void CuStreamCompareX( CuTest* tc, struct test_node *s1, struct test_node *s2 ) { int i = 0; while ( 1 ) { CuAssertIntEquals( tc, s1[i].type, s2[i].type ); if ( s1[i].type == T_END ) return; if ( s1[i].tag != 0 && s2[i].tag != 0 ) CuAssertStrEquals( tc, s1[i].tag, s2[i].tag ); switch ( s1[i].type ) { case T_STR: CuAssertStrEquals( tc, s1[i].key, s2[i].key ); break; case T_SEQ: case T_MAP: CuStreamCompareX( tc, s1[i].value, s2[i].value ); break; } i++; } } void CuStreamCompare( CuTest* tc, char *yaml, struct test_node *stream ) { int doc_ct = 0; struct test_node *ystream = S_ALLOC_N( struct test_node, doc_ct + 1 ); CuString *msg; /* Set up parser */ SyckParser *parser = syck_new_parser(); syck_parser_str_auto( parser, yaml, NULL ); syck_parser_handler( parser, syck_copy_handler ); syck_parser_error_handler( parser, NULL ); syck_parser_implicit_typing( parser, 1 ); syck_parser_taguri_expansion( parser, 1 ); /* Parse all streams */ while ( 1 ) { struct test_node 
*ydoc; SYMID oid = syck_parse( parser ); if ( parser->eof == 1 ) break; /* Add document to stream */ syck_lookup_sym( parser, oid, (char **)&ydoc ); ystream[doc_ct] = ydoc[0]; doc_ct++; S_REALLOC_N( ystream, struct test_node, doc_ct + 1 ); } ystream[doc_ct] = end_node; /* Traverse the struct and the symbol table side-by-side */ /* DEBUG: y( stream, 0 ); y( ystream, 0 ); */ CuStreamCompareX( tc, stream, ystream ); /* Free the node tables and the parser */ S_FREE( ystream ); if ( parser->syms != NULL ) st_foreach( parser->syms, syck_free_copies, 0 ); syck_free_parser( parser ); } /* * Setup for testing N->Y->N. */ void test_output_handler( emitter, str, len ) SyckEmitter *emitter; char *str; long len; { CuString *dest = (CuString *)emitter->bonus; CuStringAppendLen( dest, str, len ); } SYMID build_symbol_table( SyckEmitter *emitter, struct test_node *node ) { switch ( node->type ) { case T_SEQ: case T_MAP: { int i = 0; while ( node->value[i].type != T_END ) { SYMID id = build_symbol_table( emitter, &node->value[i] ); i++; } } return syck_emitter_mark_node( emitter, (st_data_t)node ); case T_STR: return 0; } } void test_emitter_handler( SyckEmitter *emitter, st_data_t data ) { struct test_node *node = (struct test_node *)data; switch ( node->type ) { case T_STR: syck_emit_scalar( emitter, node->tag, scalar_none, 0, 0, 0, node->key, strlen( node->key ) ); break; case T_SEQ: { int i = 0; syck_emit_seq( emitter, node->tag, seq_none ); while ( node->value[i].type != T_END ) { syck_emit_item( emitter, (st_data_t)&node->value[i] ); i++; } syck_emit_end( emitter ); } break; case T_MAP: { int i = 0; syck_emit_map( emitter, node->tag, map_none ); while ( node->value[i].type != T_END ) { syck_emit_item( emitter, (st_data_t)&node->value[i] ); i++; } syck_emit_end( emitter ); } break; } } void CuRoundTrip( CuTest* tc, struct test_node *stream ) { int i = 0; CuString *cs = CuStringNew(); SyckEmitter *emitter = syck_new_emitter(); /* Calculate anchors and tags */ build_symbol_table( emitter, stream ); /* Build the stream */ syck_output_handler( emitter, test_output_handler ); syck_emitter_handler( emitter, test_emitter_handler ); emitter->bonus = cs; while ( stream[i].type != T_END ) { syck_emit( emitter, (st_data_t)&stream[i] ); syck_emitter_flush( emitter, 0 ); i++; } /* Reload the stream and compare */ /* printf( "-- output for %s --\n%s\n--- end of output --\n", tc->name, cs->buffer ); */ CuStreamCompare( tc, cs->buffer, stream ); CuStringFree( cs ); syck_free_emitter( emitter ); } /* * ACTUAL TESTS FOR THE YAML TESTING SUITE BEGIN HERE * (EVERYTHING PREVIOUS WAS SET UP FOR THE TESTS) */ % syck_tests.each do |yt| /* * Example <%= yt['spec'] %>: <%= yt['test'] %> */ void <%= yt['func'] %>( CuTest *tc ) { <%= yt['syck'] %> CuStreamCompare( tc, /* YAML document */ <%= yt['yaml'].gsub( /\\/, '\\\\\\\\' ).gsub( '"', '\"' ).gsub( /^(.*)$/, '"\1\n"' ) %> , /* C structure of validations */ stream ); CuRoundTrip( tc, stream ); } % end CuSuite * SyckGetSuite() { CuSuite *suite = CuSuiteNew(); % syck_tests.each do |yt| SUITE_ADD_TEST( suite, <%= yt['func'] %> ); % end return suite; } int main(void) { CuString *output = CuStringNew(); CuSuite* suite = SyckGetSuite(); int count; CuSuiteRun(suite); CuSuiteSummary(suite, output); CuSuiteDetails(suite, output); printf("%s\n", output->buffer); count = suite->failCount; CuStringFree( output ); CuSuiteFree( suite ); return count; } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/tests/Makefile.in0000644000000000000000000003601311672453175023274 0ustar rootroot# 
Makefile.in generated by automake 1.9.5 from Makefile.am. # @configure_input@ # Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, # 2003, 2004, 2005 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ SOURCES = $(test_basic_SOURCES) $(test_emit_SOURCES) $(test_parse_SOURCES) $(test_yts_SOURCES) srcdir = @srcdir@ top_srcdir = @top_srcdir@ VPATH = @srcdir@ pkgdatadir = $(datadir)/@PACKAGE@ pkglibdir = $(libdir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ top_builddir = .. am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd INSTALL = @INSTALL@ install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : noinst_PROGRAMS = test-basic$(EXEEXT) test-parse$(EXEEXT) \ test-yts$(EXEEXT) test-emit$(EXEEXT) subdir = tests DIST_COMMON = $(srcdir)/Makefile.am $(srcdir)/Makefile.in ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/configure.in am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = PROGRAMS = $(noinst_PROGRAMS) am_test_basic_OBJECTS = Basic.$(OBJEXT) CuTest.$(OBJEXT) test_basic_OBJECTS = $(am_test_basic_OBJECTS) test_basic_DEPENDENCIES = am_test_emit_OBJECTS = Emit.$(OBJEXT) CuTest.$(OBJEXT) test_emit_OBJECTS = $(am_test_emit_OBJECTS) test_emit_DEPENDENCIES = am_test_parse_OBJECTS = Parse.$(OBJEXT) CuTest.$(OBJEXT) test_parse_OBJECTS = $(am_test_parse_OBJECTS) test_parse_DEPENDENCIES = am_test_yts_OBJECTS = YTS.$(OBJEXT) CuTest.$(OBJEXT) test_yts_OBJECTS = $(am_test_yts_OBJECTS) test_yts_DEPENDENCIES = DEFAULT_INCLUDES = -I. 
-I$(srcdir) -I$(top_builddir) depcomp = $(SHELL) $(top_srcdir)/config/depcomp am__depfiles_maybe = depfiles COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) CCLD = $(CC) LINK = $(CCLD) $(AM_CFLAGS) $(CFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ SOURCES = $(test_basic_SOURCES) $(test_emit_SOURCES) \ $(test_parse_SOURCES) $(test_yts_SOURCES) DIST_SOURCES = $(test_basic_SOURCES) $(test_emit_SOURCES) \ $(test_parse_SOURCES) $(test_yts_SOURCES) ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) ACLOCAL = @ACLOCAL@ AMDEP_FALSE = @AMDEP_FALSE@ AMDEP_TRUE = @AMDEP_TRUE@ AMTAR = @AMTAR@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CYGPATH_W = @CYGPATH_W@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ LDFLAGS = -L$(top_srcdir)/lib LEX = @LEX@ LEXLIB = @LEXLIB@ LEX_OUTPUT_ROOT = @LEX_OUTPUT_ROOT@ LIBOBJS = @LIBOBJS@ LIBS = @LIBS@ LN_S = @LN_S@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ OBJEXT = @OBJEXT@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ RANLIB = @RANLIB@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ STRIP = @STRIP@ VERSION = @VERSION@ YACC = @YACC@ ac_ct_CC = @ac_ct_CC@ ac_ct_RANLIB = @ac_ct_RANLIB@ ac_ct_STRIP = @ac_ct_STRIP@ am__fastdepCC_FALSE = @am__fastdepCC_FALSE@ am__fastdepCC_TRUE = @am__fastdepCC_TRUE@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ bindir = @bindir@ build_alias = @build_alias@ datadir = @datadir@ exec_prefix = @exec_prefix@ host_alias = @host_alias@ includedir = @includedir@ infodir = @infodir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ oldincludedir = @oldincludedir@ prefix = @prefix@ program_transform_name = @program_transform_name@ sbindir = @sbindir@ sharedstatedir = @sharedstatedir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ INCLUDES = -I$(top_srcdir)/lib TESTS = test-basic test-parse test-yts test-emit test_basic_SOURCES = Basic.c CuTest.c CuTest.h test_basic_LDADD = -lsyck test_parse_SOURCES = Parse.c CuTest.c CuTest.h test_parse_LDADD = -lsyck test_yts_SOURCES = YTS.c CuTest.c CuTest.h test_yts_LDADD = -lsyck test_emit_SOURCES = Emit.c CuTest.c CuTest.h test_emit_LDADD = -lsyck all: all-am .SUFFIXES: .SUFFIXES: .c .o .obj $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh \ && exit 0; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign tests/Makefile'; \ cd $(top_srcdir) && \ $(AUTOMAKE) --foreign tests/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' 
in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh clean-noinstPROGRAMS: -test -z "$(noinst_PROGRAMS)" || rm -f $(noinst_PROGRAMS) test-basic$(EXEEXT): $(test_basic_OBJECTS) $(test_basic_DEPENDENCIES) @rm -f test-basic$(EXEEXT) $(LINK) $(test_basic_LDFLAGS) $(test_basic_OBJECTS) $(test_basic_LDADD) $(LIBS) test-emit$(EXEEXT): $(test_emit_OBJECTS) $(test_emit_DEPENDENCIES) @rm -f test-emit$(EXEEXT) $(LINK) $(test_emit_LDFLAGS) $(test_emit_OBJECTS) $(test_emit_LDADD) $(LIBS) test-parse$(EXEEXT): $(test_parse_OBJECTS) $(test_parse_DEPENDENCIES) @rm -f test-parse$(EXEEXT) $(LINK) $(test_parse_LDFLAGS) $(test_parse_OBJECTS) $(test_parse_LDADD) $(LIBS) test-yts$(EXEEXT): $(test_yts_OBJECTS) $(test_yts_DEPENDENCIES) @rm -f test-yts$(EXEEXT) $(LINK) $(test_yts_LDFLAGS) $(test_yts_OBJECTS) $(test_yts_LDADD) $(LIBS) mostlyclean-compile: -rm -f *.$(OBJEXT) distclean-compile: -rm -f *.tab.c @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/Basic.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/CuTest.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/Emit.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/Parse.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/YTS.Po@am__quote@ .c.o: @am__fastdepCC_TRUE@ if $(COMPILE) -MT $@ -MD -MP -MF "$(DEPDIR)/$*.Tpo" -c -o $@ $<; \ @am__fastdepCC_TRUE@ then mv -f "$(DEPDIR)/$*.Tpo" "$(DEPDIR)/$*.Po"; else rm -f "$(DEPDIR)/$*.Tpo"; exit 1; fi @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(COMPILE) -c $< .c.obj: @am__fastdepCC_TRUE@ if $(COMPILE) -MT $@ -MD -MP -MF "$(DEPDIR)/$*.Tpo" -c -o $@ `$(CYGPATH_W) '$<'`; \ @am__fastdepCC_TRUE@ then mv -f "$(DEPDIR)/$*.Tpo" "$(DEPDIR)/$*.Po"; else rm -f "$(DEPDIR)/$*.Tpo"; exit 1; fi @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(COMPILE) -c `$(CYGPATH_W) '$<'` uninstall-info-am: ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES) list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) ' { files[$$0] = 1; } \ END { for (i in files) print i; }'`; \ mkid -fID $$unique tags: TAGS TAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) tags=; \ here=`pwd`; \ list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) ' { files[$$0] = 1; } \ END { for (i in files) print i; }'`; \ if test -z "$(ETAGS_ARGS)$$tags$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$tags $$unique; \ fi ctags: CTAGS CTAGS: $(HEADERS) $(SOURCES) 
$(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) tags=; \ here=`pwd`; \ list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) ' { files[$$0] = 1; } \ END { for (i in files) print i; }'`; \ test -z "$(CTAGS_ARGS)$$tags$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$tags $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && cd $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) $$here distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags check-TESTS: $(TESTS) @failed=0; all=0; xfail=0; xpass=0; skip=0; \ srcdir=$(srcdir); export srcdir; \ list='$(TESTS)'; \ if test -n "$$list"; then \ for tst in $$list; do \ if test -f ./$$tst; then dir=./; \ elif test -f $$tst; then dir=; \ else dir="$(srcdir)/"; fi; \ if $(TESTS_ENVIRONMENT) $${dir}$$tst; then \ all=`expr $$all + 1`; \ case " $(XFAIL_TESTS) " in \ *" $$tst "*) \ xpass=`expr $$xpass + 1`; \ failed=`expr $$failed + 1`; \ echo "XPASS: $$tst"; \ ;; \ *) \ echo "PASS: $$tst"; \ ;; \ esac; \ elif test $$? -ne 77; then \ all=`expr $$all + 1`; \ case " $(XFAIL_TESTS) " in \ *" $$tst "*) \ xfail=`expr $$xfail + 1`; \ echo "XFAIL: $$tst"; \ ;; \ *) \ failed=`expr $$failed + 1`; \ echo "FAIL: $$tst"; \ ;; \ esac; \ else \ skip=`expr $$skip + 1`; \ echo "SKIP: $$tst"; \ fi; \ done; \ if test "$$failed" -eq 0; then \ if test "$$xfail" -eq 0; then \ banner="All $$all tests passed"; \ else \ banner="All $$all tests behaved as expected ($$xfail expected failures)"; \ fi; \ else \ if test "$$xpass" -eq 0; then \ banner="$$failed of $$all tests failed"; \ else \ banner="$$failed of $$all tests did not behave as expected ($$xpass unexpected passes)"; \ fi; \ fi; \ dashes="$$banner"; \ skipped=""; \ if test "$$skip" -ne 0; then \ skipped="($$skip tests were not run)"; \ test `echo "$$skipped" | wc -c` -le `echo "$$banner" | wc -c` || \ dashes="$$skipped"; \ fi; \ report=""; \ if test "$$failed" -ne 0 && test -n "$(PACKAGE_BUGREPORT)"; then \ report="Please report to $(PACKAGE_BUGREPORT)"; \ test `echo "$$report" | wc -c` -le `echo "$$banner" | wc -c` || \ dashes="$$report"; \ fi; \ dashes=`echo "$$dashes" | sed s/./=/g`; \ echo "$$dashes"; \ echo "$$banner"; \ test -z "$$skipped" || echo "$$skipped"; \ test -z "$$report" || echo "$$report"; \ echo "$$dashes"; \ test "$$failed" -eq 0; \ else :; fi distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's|.|.|g'`; \ list='$(DISTFILES)'; for file in $$list; do \ case $$file in \ $(srcdir)/*) file=`echo "$$file" | sed "s|^$$srcdirstrip/||"`;; \ $(top_srcdir)/*) file=`echo "$$file" | sed "s|^$$topsrcdirstrip/|$(top_builddir)/|"`;; \ esac; \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ dir=`echo "$$file" | sed -e 's,/[^/]*$$,,'`; \ if test "$$dir" != "$$file" && test "$$dir" != "."; then \ dir="/$$dir"; \ $(mkdir_p) "$(distdir)$$dir"; \ else \ dir=''; \ fi; \ if test -d $$d/$$file; then \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -pR $(srcdir)/$$file $(distdir)$$dir || exit 1; \ fi; \ cp -pR $$d/$$file $(distdir)$$dir || exit 1; \ else \ test -f $(distdir)/$$file \ || cp -p $$d/$$file $(distdir)/$$file \ || exit 1; \ fi; \ done check-am: all-am $(MAKE) $(AM_MAKEFLAGS) check-TESTS check: check-am all-am: Makefile $(PROGRAMS) installdirs: install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) 
$(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ `test -z '$(STRIP)' || \ echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." clean: clean-am clean-am: clean-generic clean-noinstPROGRAMS mostlyclean-am distclean: distclean-am -rm -rf ./$(DEPDIR) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-tags dvi: dvi-am dvi-am: html: html-am info: info-am info-am: install-data-am: install-exec-am: install-info: install-info-am install-man: installcheck-am: maintainer-clean: maintainer-clean-am -rm -rf ./$(DEPDIR) -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-compile mostlyclean-generic pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-info-am .PHONY: CTAGS GTAGS all all-am check check-TESTS check-am clean \ clean-generic clean-noinstPROGRAMS ctags distclean \ distclean-compile distclean-generic distclean-tags distdir dvi \ dvi-am html html-am info info-am install install-am \ install-data install-data-am install-exec install-exec-am \ install-info install-info-am install-man install-strip \ installcheck installcheck-am installdirs maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-compile \ mostlyclean-generic pdf pdf-am ps ps-am tags uninstall \ uninstall-am uninstall-info-am # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/tests/YTS.c.rb0000644000000000000000000000206011672453175022447 0ustar rootroot# # YTS.c.rb # # $Author: why $ # $Date: 2005/01/02 04:33:40 $ # # Copyright (C) 2004 why the lucky stiff # # This Ruby script generates the YTS suite for the # Syck base lib. Basically, it searches ext/ruby/yts/ for # tests with a 'syck' entry. # # To regenerate things yourself: # # ruby YTS.c.rb > YTS.c # # Oh and your Ruby must have YAML installed. # require 'erb' require 'yaml' # Find the Syck directory. yts_dir = "ext/ruby/yts/" syck_dir = "" while File.expand_path( syck_dir ) != "/" break if File.directory?( syck_dir + yts_dir ) syck_dir << "../" end yts_dir = syck_dir + yts_dir abort "No YTS directory found" unless File.directory?( yts_dir ) # Load the YTS syck_tests = [] YAML::load( File.open( yts_dir + "index.yml" ) ).each do |yst| ct = 0 YAML.each_document( File.open( yts_dir + yst + ".yml" ) ) do |ydoc| ydoc['group'] = yst ydoc['func'] = "#{ ydoc['group'] }_#{ ct }" syck_tests << ydoc if ydoc['syck'] ct += 1 end end puts ERB.new( File.read( syck_dir + "tests/YTS.c.erb" ), 0, "%<>" ).result ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/tests/Makefile0000644000000000000000000003506211672453175022672 0ustar rootroot# Makefile.in generated by automake 1.9.5 from Makefile.am. # tests/Makefile. Generated from Makefile.in by configure. # Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, # 2003, 2004, 2005 Free Software Foundation, Inc. 
# This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. SOURCES = $(test_basic_SOURCES) $(test_emit_SOURCES) $(test_parse_SOURCES) $(test_yts_SOURCES) srcdir = . top_srcdir = .. pkgdatadir = $(datadir)/syck pkglibdir = $(libdir)/syck pkgincludedir = $(includedir)/syck top_builddir = .. am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd INSTALL = /usr/bin/install -c install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : noinst_PROGRAMS = test-basic$(EXEEXT) test-parse$(EXEEXT) \ test-yts$(EXEEXT) test-emit$(EXEEXT) subdir = tests DIST_COMMON = $(srcdir)/Makefile.am $(srcdir)/Makefile.in ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/configure.in am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = PROGRAMS = $(noinst_PROGRAMS) am_test_basic_OBJECTS = Basic.$(OBJEXT) CuTest.$(OBJEXT) test_basic_OBJECTS = $(am_test_basic_OBJECTS) test_basic_DEPENDENCIES = am_test_emit_OBJECTS = Emit.$(OBJEXT) CuTest.$(OBJEXT) test_emit_OBJECTS = $(am_test_emit_OBJECTS) test_emit_DEPENDENCIES = am_test_parse_OBJECTS = Parse.$(OBJEXT) CuTest.$(OBJEXT) test_parse_OBJECTS = $(am_test_parse_OBJECTS) test_parse_DEPENDENCIES = am_test_yts_OBJECTS = YTS.$(OBJEXT) CuTest.$(OBJEXT) test_yts_OBJECTS = $(am_test_yts_OBJECTS) test_yts_DEPENDENCIES = DEFAULT_INCLUDES = -I. 
-I$(srcdir) -I$(top_builddir) depcomp = $(SHELL) $(top_srcdir)/config/depcomp am__depfiles_maybe = depfiles COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) CCLD = $(CC) LINK = $(CCLD) $(AM_CFLAGS) $(CFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ SOURCES = $(test_basic_SOURCES) $(test_emit_SOURCES) \ $(test_parse_SOURCES) $(test_yts_SOURCES) DIST_SOURCES = $(test_basic_SOURCES) $(test_emit_SOURCES) \ $(test_parse_SOURCES) $(test_yts_SOURCES) ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) ACLOCAL = ${SHELL} /home/why/sand/syck-0.55/config/missing --run aclocal-1.9 AMDEP_FALSE = # AMDEP_TRUE = AMTAR = ${SHELL} /home/why/sand/syck-0.55/config/missing --run tar AUTOCONF = ${SHELL} /home/why/sand/syck-0.55/config/missing --run autoconf AUTOHEADER = ${SHELL} /home/why/sand/syck-0.55/config/missing --run autoheader AUTOMAKE = ${SHELL} /home/why/sand/syck-0.55/config/missing --run automake-1.9 AWK = gawk CC = gcc CCDEPMODE = depmode=gcc3 CFLAGS = -g -O2 CPP = gcc -E CPPFLAGS = CYGPATH_W = echo DEFS = -DHAVE_CONFIG_H DEPDIR = .deps ECHO_C = ECHO_N = -n ECHO_T = EGREP = grep -E EXEEXT = INSTALL_DATA = ${INSTALL} -m 644 INSTALL_PROGRAM = ${INSTALL} INSTALL_SCRIPT = ${INSTALL} INSTALL_STRIP_PROGRAM = ${SHELL} $(install_sh) -c -s LDFLAGS = -L$(top_srcdir)/lib LEX = flex LEXLIB = -lfl LEX_OUTPUT_ROOT = lex.yy LIBOBJS = LIBS = LN_S = ln -s LTLIBOBJS = MAKEINFO = ${SHELL} /home/why/sand/syck-0.55/config/missing --run makeinfo OBJEXT = o PACKAGE = syck PACKAGE_BUGREPORT = PACKAGE_NAME = syck PACKAGE_STRING = syck 0.54 PACKAGE_TARNAME = syck PACKAGE_VERSION = 0.54 PATH_SEPARATOR = : RANLIB = ranlib SET_MAKE = SHELL = /bin/sh STRIP = VERSION = 0.54 YACC = bison -y ac_ct_CC = gcc ac_ct_RANLIB = ranlib ac_ct_STRIP = am__fastdepCC_FALSE = # am__fastdepCC_TRUE = am__include = include am__leading_dot = . am__quote = am__tar = ${AMTAR} chof - "$$tardir" am__untar = ${AMTAR} xf - bindir = ${exec_prefix}/bin build_alias = datadir = ${prefix}/share exec_prefix = ${prefix} host_alias = includedir = ${prefix}/include infodir = ${prefix}/info install_sh = /home/why/sand/syck-0.55/config/install-sh libdir = ${exec_prefix}/lib libexecdir = ${exec_prefix}/libexec localstatedir = ${prefix}/var mandir = ${prefix}/man mkdir_p = mkdir -p -- oldincludedir = /usr/include prefix = /usr/local program_transform_name = s,x,x, sbindir = ${exec_prefix}/sbin sharedstatedir = ${prefix}/com sysconfdir = ${prefix}/etc target_alias = INCLUDES = -I$(top_srcdir)/lib TESTS = test-basic test-parse test-yts test-emit test_basic_SOURCES = Basic.c CuTest.c CuTest.h test_basic_LDADD = -lsyck test_parse_SOURCES = Parse.c CuTest.c CuTest.h test_parse_LDADD = -lsyck test_yts_SOURCES = YTS.c CuTest.c CuTest.h test_yts_LDADD = -lsyck test_emit_SOURCES = Emit.c CuTest.c CuTest.h test_emit_LDADD = -lsyck all: all-am .SUFFIXES: .SUFFIXES: .c .o .obj $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh \ && exit 0; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign tests/Makefile'; \ cd $(top_srcdir) && \ $(AUTOMAKE) --foreign tests/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' 
in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh clean-noinstPROGRAMS: -test -z "$(noinst_PROGRAMS)" || rm -f $(noinst_PROGRAMS) test-basic$(EXEEXT): $(test_basic_OBJECTS) $(test_basic_DEPENDENCIES) @rm -f test-basic$(EXEEXT) $(LINK) $(test_basic_LDFLAGS) $(test_basic_OBJECTS) $(test_basic_LDADD) $(LIBS) test-emit$(EXEEXT): $(test_emit_OBJECTS) $(test_emit_DEPENDENCIES) @rm -f test-emit$(EXEEXT) $(LINK) $(test_emit_LDFLAGS) $(test_emit_OBJECTS) $(test_emit_LDADD) $(LIBS) test-parse$(EXEEXT): $(test_parse_OBJECTS) $(test_parse_DEPENDENCIES) @rm -f test-parse$(EXEEXT) $(LINK) $(test_parse_LDFLAGS) $(test_parse_OBJECTS) $(test_parse_LDADD) $(LIBS) test-yts$(EXEEXT): $(test_yts_OBJECTS) $(test_yts_DEPENDENCIES) @rm -f test-yts$(EXEEXT) $(LINK) $(test_yts_LDFLAGS) $(test_yts_OBJECTS) $(test_yts_LDADD) $(LIBS) mostlyclean-compile: -rm -f *.$(OBJEXT) distclean-compile: -rm -f *.tab.c include ./$(DEPDIR)/Basic.Po include ./$(DEPDIR)/CuTest.Po include ./$(DEPDIR)/Emit.Po include ./$(DEPDIR)/Parse.Po include ./$(DEPDIR)/YTS.Po .c.o: if $(COMPILE) -MT $@ -MD -MP -MF "$(DEPDIR)/$*.Tpo" -c -o $@ $<; \ then mv -f "$(DEPDIR)/$*.Tpo" "$(DEPDIR)/$*.Po"; else rm -f "$(DEPDIR)/$*.Tpo"; exit 1; fi # source='$<' object='$@' libtool=no \ # DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) \ # $(COMPILE) -c $< .c.obj: if $(COMPILE) -MT $@ -MD -MP -MF "$(DEPDIR)/$*.Tpo" -c -o $@ `$(CYGPATH_W) '$<'`; \ then mv -f "$(DEPDIR)/$*.Tpo" "$(DEPDIR)/$*.Po"; else rm -f "$(DEPDIR)/$*.Tpo"; exit 1; fi # source='$<' object='$@' libtool=no \ # DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) \ # $(COMPILE) -c `$(CYGPATH_W) '$<'` uninstall-info-am: ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES) list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) ' { files[$$0] = 1; } \ END { for (i in files) print i; }'`; \ mkid -fID $$unique tags: TAGS TAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) tags=; \ here=`pwd`; \ list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) ' { files[$$0] = 1; } \ END { for (i in files) print i; }'`; \ if test -z "$(ETAGS_ARGS)$$tags$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$tags $$unique; \ fi ctags: CTAGS CTAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) tags=; \ here=`pwd`; \ list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) ' { files[$$0] = 1; } \ END { for (i in files) print i; }'`; \ test -z "$(CTAGS_ARGS)$$tags$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$tags $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && cd $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) 
$$here distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags check-TESTS: $(TESTS) @failed=0; all=0; xfail=0; xpass=0; skip=0; \ srcdir=$(srcdir); export srcdir; \ list='$(TESTS)'; \ if test -n "$$list"; then \ for tst in $$list; do \ if test -f ./$$tst; then dir=./; \ elif test -f $$tst; then dir=; \ else dir="$(srcdir)/"; fi; \ if $(TESTS_ENVIRONMENT) $${dir}$$tst; then \ all=`expr $$all + 1`; \ case " $(XFAIL_TESTS) " in \ *" $$tst "*) \ xpass=`expr $$xpass + 1`; \ failed=`expr $$failed + 1`; \ echo "XPASS: $$tst"; \ ;; \ *) \ echo "PASS: $$tst"; \ ;; \ esac; \ elif test $$? -ne 77; then \ all=`expr $$all + 1`; \ case " $(XFAIL_TESTS) " in \ *" $$tst "*) \ xfail=`expr $$xfail + 1`; \ echo "XFAIL: $$tst"; \ ;; \ *) \ failed=`expr $$failed + 1`; \ echo "FAIL: $$tst"; \ ;; \ esac; \ else \ skip=`expr $$skip + 1`; \ echo "SKIP: $$tst"; \ fi; \ done; \ if test "$$failed" -eq 0; then \ if test "$$xfail" -eq 0; then \ banner="All $$all tests passed"; \ else \ banner="All $$all tests behaved as expected ($$xfail expected failures)"; \ fi; \ else \ if test "$$xpass" -eq 0; then \ banner="$$failed of $$all tests failed"; \ else \ banner="$$failed of $$all tests did not behave as expected ($$xpass unexpected passes)"; \ fi; \ fi; \ dashes="$$banner"; \ skipped=""; \ if test "$$skip" -ne 0; then \ skipped="($$skip tests were not run)"; \ test `echo "$$skipped" | wc -c` -le `echo "$$banner" | wc -c` || \ dashes="$$skipped"; \ fi; \ report=""; \ if test "$$failed" -ne 0 && test -n "$(PACKAGE_BUGREPORT)"; then \ report="Please report to $(PACKAGE_BUGREPORT)"; \ test `echo "$$report" | wc -c` -le `echo "$$banner" | wc -c` || \ dashes="$$report"; \ fi; \ dashes=`echo "$$dashes" | sed s/./=/g`; \ echo "$$dashes"; \ echo "$$banner"; \ test -z "$$skipped" || echo "$$skipped"; \ test -z "$$report" || echo "$$report"; \ echo "$$dashes"; \ test "$$failed" -eq 0; \ else :; fi distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's|.|.|g'`; \ list='$(DISTFILES)'; for file in $$list; do \ case $$file in \ $(srcdir)/*) file=`echo "$$file" | sed "s|^$$srcdirstrip/||"`;; \ $(top_srcdir)/*) file=`echo "$$file" | sed "s|^$$topsrcdirstrip/|$(top_builddir)/|"`;; \ esac; \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ dir=`echo "$$file" | sed -e 's,/[^/]*$$,,'`; \ if test "$$dir" != "$$file" && test "$$dir" != "."; then \ dir="/$$dir"; \ $(mkdir_p) "$(distdir)$$dir"; \ else \ dir=''; \ fi; \ if test -d $$d/$$file; then \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -pR $(srcdir)/$$file $(distdir)$$dir || exit 1; \ fi; \ cp -pR $$d/$$file $(distdir)$$dir || exit 1; \ else \ test -f $(distdir)/$$file \ || cp -p $$d/$$file $(distdir)/$$file \ || exit 1; \ fi; \ done check-am: all-am $(MAKE) $(AM_MAKEFLAGS) check-TESTS check: check-am all-am: Makefile $(PROGRAMS) installdirs: install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ `test -z '$(STRIP)' || \ echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" 
@echo "it deletes files that may require special tools to rebuild." clean: clean-am clean-am: clean-generic clean-noinstPROGRAMS mostlyclean-am distclean: distclean-am -rm -rf ./$(DEPDIR) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-tags dvi: dvi-am dvi-am: html: html-am info: info-am info-am: install-data-am: install-exec-am: install-info: install-info-am install-man: installcheck-am: maintainer-clean: maintainer-clean-am -rm -rf ./$(DEPDIR) -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-compile mostlyclean-generic pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-info-am .PHONY: CTAGS GTAGS all all-am check check-TESTS check-am clean \ clean-generic clean-noinstPROGRAMS ctags distclean \ distclean-compile distclean-generic distclean-tags distdir dvi \ dvi-am html html-am info info-am install install-am \ install-data install-data-am install-exec install-exec-am \ install-info install-info-am install-man install-strip \ installcheck installcheck-am installdirs maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-compile \ mostlyclean-generic pdf pdf-am ps ps-am tags uninstall \ uninstall-am uninstall-info-am # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/tests/Parse.c0000644000000000000000000001214411672453175022444 0ustar rootroot// // Parse.c // // $Author: whythluckystiff $ // $Date: 2003/07/29 16:54:35 $ // // Copyright (C) 2003 why the lucky stiff // #include #include "syck.h" #include "CuTest.h" // // 1. Test the buffering -- read 4 bytes at a time // void TestSyckReadString( CuTest *tc ) { SyckParser *parser; char *tmp; int len = 0; parser = syck_new_parser(); syck_parser_str_auto( parser, "test: 1\nand: 2\nalso: 3", syck_io_str_read ); len = syck_parser_readlen( parser, 4 ); CuAssert( tc, "Wrong length, line 1.", 4 == len ); parser->token = parser->buffer + 4; tmp = syck_strndup( parser->buffer, len ); CuAssertStrEquals( tc, "test", tmp ); free( tmp ); len = syck_parser_readlen( parser, 4 ); CuAssert( tc, "Wrong length, line 2.", 4 == len ); parser->token = parser->buffer + 4; tmp = syck_strndup( parser->buffer, len ); CuAssertStrEquals( tc, ": 1\n", tmp ); free( tmp ); len = syck_parser_readlen( parser, 4 ); CuAssert( tc, "Wrong length, line 3.", 4 == len ); parser->token = parser->buffer + 4; tmp = syck_strndup( parser->buffer, len ); CuAssertStrEquals( tc, "and:", tmp ); free( tmp ); len = syck_parser_readlen( parser, 4 ); CuAssert( tc, "Wrong length, line 4.", 4 == len ); parser->token = parser->buffer + 4; tmp = syck_strndup( parser->buffer, len ); CuAssertStrEquals( tc, " 2\na", tmp ); free( tmp ); len = syck_parser_readlen( parser, 4 ); CuAssert( tc, "Wrong length, line 5.", 4 == len ); parser->token = parser->buffer + 4; tmp = syck_strndup( parser->buffer, len ); CuAssertStrEquals( tc, "lso:", tmp ); free( tmp ); len = syck_parser_readlen( parser, 4 ); CuAssert( tc, "Wrong length, line 6.", 2 == len ); parser->token = parser->buffer + 4; tmp = syck_strndup( parser->buffer, len ); CuAssertStrEquals( tc, " 3", tmp ); free( tmp ); free_any_io( parser ); syck_free_parser( parser ); } // // 2. 
Test parsing a simple string and handler // SYMID SyckParseStringHandler( SyckParser *p, SyckNode *n ) { if ( n->kind != syck_str_kind ) return 100; if ( strcmp( syck_str_read( n ), "a_test_string" ) != 0 ) return 200; return 1112; } void TestSyckParseString( CuTest *tc ) { SyckParser *parser; SYMID id; parser = syck_new_parser(); syck_parser_handler( parser, SyckParseStringHandler ); syck_parser_str_auto( parser, "--- a_test_string", NULL ); id = syck_parse( parser ); CuAssert( tc, "Handler returned incorrect value.", 1112 == id ); syck_free_parser( parser ); } // // 3. // SYMID SyckParseString2Handler( SyckParser *p, SyckNode *n ) { if ( n->kind != syck_str_kind ) return 100; if ( strcmp( syck_str_read( n ), "a_test_string" ) != 0 ) return 200; return 1112; } enum st_retval ListAnchors( char *key, SyckNode *n, CuTest *tc ) { char *sd = syck_strndup( n->data.str->ptr, n->data.str->len ); CuAssertStrEquals( tc, "test", key ); CuAssertStrEquals( tc, "13", sd ); free( sd ); return ST_CONTINUE; } void TestSyckParseString2( CuTest *tc ) { SyckParser *parser; parser = syck_new_parser(); syck_parser_handler( parser, SyckParseStringHandler ); syck_parser_str_auto( parser, "--- {test: 1, and: 2, or: &test 13}", NULL ); syckparse( parser ); st_foreach( parser->anchors, ListAnchors, tc ); syck_free_parser( parser ); } void TestSyckParseMap( CuTest *tc ) { SYMID id; SyckParser *parser; parser = syck_new_parser(); syck_parser_handler( parser, SyckParseStringHandler ); syck_parser_str_auto( parser, "\ntest: 1\nand: 2\nor:\n test: 1\n and: 2\n fourdepth:\n deep: 1\nlast: end", NULL ); syck_parse( parser ); syck_free_parser( parser ); } void TestSyckParseFold( CuTest *tc ) { SYMID id; SyckParser *parser; parser = syck_new_parser(); syck_parser_handler( parser, SyckParseStringHandler ); syck_parser_str_auto( parser, "\ntest: |\n deep: 1\nlast: end\n \n", NULL ); syck_parse( parser ); syck_free_parser( parser ); } void TestSyckParseMultidoc( CuTest *tc ) { SYMID id; SyckParser *parser; parser = syck_new_parser(); syck_parser_handler( parser, SyckParseStringHandler ); syck_parser_str_auto( parser, "---\ntest: |\n deep: 1\n---\nlast: end\n \n", NULL ); syck_parse( parser ); syck_parse( parser ); syck_free_parser( parser ); } CuSuite * SyckGetSuite() { CuSuite *suite = CuSuiteNew(); SUITE_ADD_TEST( suite, TestSyckReadString ); SUITE_ADD_TEST( suite, TestSyckParseString ); SUITE_ADD_TEST( suite, TestSyckParseString2 ); SUITE_ADD_TEST( suite, TestSyckParseMap ); SUITE_ADD_TEST( suite, TestSyckParseFold ); SUITE_ADD_TEST( suite, TestSyckParseMultidoc ); return suite; } int main(void) { CuString *output = CuStringNew(); CuSuite* suite = SyckGetSuite(); int count; CuSuiteRun(suite); CuSuiteSummary(suite, output); CuSuiteDetails(suite, output); printf("%s\n", output->buffer); count = suite->failCount; CuStringFree( output ); CuSuiteFree( suite ); return count; } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/Makefile0000644000000000000000000004356711672453175021541 0ustar rootroot# Makefile.in generated by automake 1.9.5 from Makefile.am. # Makefile. Generated from Makefile.in by configure. # Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, # 2003, 2004, 2005 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. 
# This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. srcdir = . top_srcdir = . pkgdatadir = $(datadir)/syck pkglibdir = $(libdir)/syck pkgincludedir = $(includedir)/syck top_builddir = . am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd INSTALL = /usr/bin/install -c install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : subdir = . DIST_COMMON = README $(am__configure_deps) $(srcdir)/Makefile.am \ $(srcdir)/Makefile.in $(srcdir)/config.h.in \ $(top_srcdir)/configure COPYING TODO config/README \ config/depcomp config/install-sh config/missing ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/configure.in am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) am__CONFIG_DISTCLEAN_FILES = config.status config.cache config.log \ configure.lineno configure.status.lineno mkinstalldirs = $(install_sh) -d CONFIG_HEADER = config.h CONFIG_CLEAN_FILES = SOURCES = DIST_SOURCES = RECURSIVE_TARGETS = all-recursive check-recursive dvi-recursive \ html-recursive info-recursive install-data-recursive \ install-exec-recursive install-info-recursive \ install-recursive installcheck-recursive installdirs-recursive \ pdf-recursive ps-recursive uninstall-info-recursive \ uninstall-recursive ETAGS = etags CTAGS = ctags DIST_SUBDIRS = $(SUBDIRS) DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) distdir = $(PACKAGE)-$(VERSION) top_distdir = $(distdir) am__remove_distdir = \ { test ! -d $(distdir) \ || { find $(distdir) -type d ! -perm -200 -exec chmod u+w {} ';' \ && rm -fr $(distdir); }; } DIST_ARCHIVES = $(distdir).tar.gz GZIP_ENV = --best distuninstallcheck_listfiles = find . -type f -print distcleancheck_listfiles = find . -type f -print ACLOCAL = ${SHELL} /home/why/sand/syck-0.55/config/missing --run aclocal-1.9 AMDEP_FALSE = # AMDEP_TRUE = AMTAR = ${SHELL} /home/why/sand/syck-0.55/config/missing --run tar AUTOCONF = ${SHELL} /home/why/sand/syck-0.55/config/missing --run autoconf AUTOHEADER = ${SHELL} /home/why/sand/syck-0.55/config/missing --run autoheader AUTOMAKE = ${SHELL} /home/why/sand/syck-0.55/config/missing --run automake-1.9 AWK = gawk CC = gcc CCDEPMODE = depmode=gcc3 CFLAGS = -g -O2 CPP = gcc -E CPPFLAGS = CYGPATH_W = echo DEFS = -DHAVE_CONFIG_H DEPDIR = .deps ECHO_C = ECHO_N = -n ECHO_T = EGREP = grep -E EXEEXT = INSTALL_DATA = ${INSTALL} -m 644 INSTALL_PROGRAM = ${INSTALL} INSTALL_SCRIPT = ${INSTALL} INSTALL_STRIP_PROGRAM = ${SHELL} $(install_sh) -c -s LDFLAGS = LEX = flex LEXLIB = -lfl LEX_OUTPUT_ROOT = lex.yy LIBOBJS = LIBS = LN_S = ln -s LTLIBOBJS = MAKEINFO = ${SHELL} /home/why/sand/syck-0.55/config/missing --run makeinfo OBJEXT = o PACKAGE = syck PACKAGE_BUGREPORT = PACKAGE_NAME = syck PACKAGE_STRING = syck 0.54 PACKAGE_TARNAME = syck PACKAGE_VERSION = 0.54 PATH_SEPARATOR = : RANLIB = ranlib SET_MAKE = SHELL = /bin/sh STRIP = VERSION = 0.54 YACC = bison -y ac_ct_CC = gcc ac_ct_RANLIB = ranlib ac_ct_STRIP = am__fastdepCC_FALSE = # am__fastdepCC_TRUE = am__include = include am__leading_dot = . 
am__quote = am__tar = ${AMTAR} chof - "$$tardir" am__untar = ${AMTAR} xf - bindir = ${exec_prefix}/bin build_alias = datadir = ${prefix}/share exec_prefix = ${prefix} host_alias = includedir = ${prefix}/include infodir = ${prefix}/info install_sh = /home/why/sand/syck-0.55/config/install-sh libdir = ${exec_prefix}/lib libexecdir = ${exec_prefix}/libexec localstatedir = ${prefix}/var mandir = ${prefix}/man mkdir_p = mkdir -p -- oldincludedir = /usr/include prefix = /usr/local program_transform_name = s,x,x, sbindir = ${exec_prefix}/sbin sharedstatedir = ${prefix}/com sysconfdir = ${prefix}/etc target_alias = # # I feel like saying, "The magic happens here!" But it doesn't. # SUBDIRS = lib tests all: config.h $(MAKE) $(AM_MAKEFLAGS) all-recursive .SUFFIXES: am--refresh: @: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ echo ' cd $(srcdir) && $(AUTOMAKE) --foreign '; \ cd $(srcdir) && $(AUTOMAKE) --foreign \ && exit 0; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign Makefile'; \ cd $(top_srcdir) && \ $(AUTOMAKE) --foreign Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ echo ' $(SHELL) ./config.status'; \ $(SHELL) ./config.status;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) $(SHELL) ./config.status --recheck $(top_srcdir)/configure: $(am__configure_deps) cd $(srcdir) && $(AUTOCONF) $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(srcdir) && $(ACLOCAL) $(ACLOCAL_AMFLAGS) config.h: stamp-h1 @if test ! -f $@; then \ rm -f stamp-h1; \ $(MAKE) stamp-h1; \ else :; fi stamp-h1: $(srcdir)/config.h.in $(top_builddir)/config.status @rm -f stamp-h1 cd $(top_builddir) && $(SHELL) ./config.status config.h $(srcdir)/config.h.in: $(am__configure_deps) cd $(top_srcdir) && $(AUTOHEADER) rm -f stamp-h1 touch $@ distclean-hdr: -rm -f config.h stamp-h1 uninstall-info-am: # This directory's subdirectories are mostly independent; you can cd # into them and run `make' without going through this Makefile. # To change the values of `make' variables: instead of editing Makefiles, # (1) if the variable is set in `config.status', edit `config.status' # (which will cause the Makefiles to be regenerated when you run `make'); # (2) otherwise, pass the desired values on the `make' command line. 
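# Illustrative example (the flag values are hypothetical, not part of the
# generated file): a one-shot override such as
#   make CFLAGS='-g -O0' check
# applies only to that invocation, whereas changing CFLAGS in `config.status'
# and letting make re-run it updates every generated Makefile.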
$(RECURSIVE_TARGETS): @failcom='exit 1'; \ for f in x $$MAKEFLAGS; do \ case $$f in \ *=* | --[!k]*);; \ *k*) failcom='fail=yes';; \ esac; \ done; \ dot_seen=no; \ target=`echo $@ | sed s/-recursive//`; \ list='$(SUBDIRS)'; for subdir in $$list; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ dot_seen=yes; \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done; \ if test "$$dot_seen" = "no"; then \ $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ fi; test -z "$$fail" mostlyclean-recursive clean-recursive distclean-recursive \ maintainer-clean-recursive: @failcom='exit 1'; \ for f in x $$MAKEFLAGS; do \ case $$f in \ *=* | --[!k]*);; \ *k*) failcom='fail=yes';; \ esac; \ done; \ dot_seen=no; \ case "$@" in \ distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ *) list='$(SUBDIRS)' ;; \ esac; \ rev=''; for subdir in $$list; do \ if test "$$subdir" = "."; then :; else \ rev="$$subdir $$rev"; \ fi; \ done; \ rev="$$rev ."; \ target=`echo $@ | sed s/-recursive//`; \ for subdir in $$rev; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done && test -z "$$fail" tags-recursive: list='$(SUBDIRS)'; for subdir in $$list; do \ test "$$subdir" = . || (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) tags); \ done ctags-recursive: list='$(SUBDIRS)'; for subdir in $$list; do \ test "$$subdir" = . || (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) ctags); \ done ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES) list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) ' { files[$$0] = 1; } \ END { for (i in files) print i; }'`; \ mkid -fID $$unique tags: TAGS TAGS: tags-recursive $(HEADERS) $(SOURCES) config.h.in $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) tags=; \ here=`pwd`; \ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ include_option=--etags-include; \ empty_fix=.; \ else \ include_option=--include; \ empty_fix=; \ fi; \ list='$(SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test ! 
-f $$subdir/TAGS || \ tags="$$tags $$include_option=$$here/$$subdir/TAGS"; \ fi; \ done; \ list='$(SOURCES) $(HEADERS) config.h.in $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) ' { files[$$0] = 1; } \ END { for (i in files) print i; }'`; \ if test -z "$(ETAGS_ARGS)$$tags$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$tags $$unique; \ fi ctags: CTAGS CTAGS: ctags-recursive $(HEADERS) $(SOURCES) config.h.in $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) tags=; \ here=`pwd`; \ list='$(SOURCES) $(HEADERS) config.h.in $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) ' { files[$$0] = 1; } \ END { for (i in files) print i; }'`; \ test -z "$(CTAGS_ARGS)$$tags$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$tags $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && cd $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) $$here distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) $(am__remove_distdir) mkdir $(distdir) $(mkdir_p) $(distdir)/config @srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's|.|.|g'`; \ list='$(DISTFILES)'; for file in $$list; do \ case $$file in \ $(srcdir)/*) file=`echo "$$file" | sed "s|^$$srcdirstrip/||"`;; \ $(top_srcdir)/*) file=`echo "$$file" | sed "s|^$$topsrcdirstrip/|$(top_builddir)/|"`;; \ esac; \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ dir=`echo "$$file" | sed -e 's,/[^/]*$$,,'`; \ if test "$$dir" != "$$file" && test "$$dir" != "."; then \ dir="/$$dir"; \ $(mkdir_p) "$(distdir)$$dir"; \ else \ dir=''; \ fi; \ if test -d $$d/$$file; then \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -pR $(srcdir)/$$file $(distdir)$$dir || exit 1; \ fi; \ cp -pR $$d/$$file $(distdir)$$dir || exit 1; \ else \ test -f $(distdir)/$$file \ || cp -p $$d/$$file $(distdir)/$$file \ || exit 1; \ fi; \ done list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test -d "$(distdir)/$$subdir" \ || $(mkdir_p) "$(distdir)/$$subdir" \ || exit 1; \ distdir=`$(am__cd) $(distdir) && pwd`; \ top_distdir=`$(am__cd) $(top_distdir) && pwd`; \ (cd $$subdir && \ $(MAKE) $(AM_MAKEFLAGS) \ top_distdir="$$top_distdir" \ distdir="$$distdir/$$subdir" \ distdir) \ || exit 1; \ fi; \ done -find $(distdir) -type d ! -perm -777 -exec chmod a+rwx {} \; -o \ ! -type d ! -perm -444 -links 1 -exec chmod a+r {} \; -o \ ! -type d ! -perm -400 -exec chmod a+r {} \; -o \ ! -type d ! -perm -444 -exec $(SHELL) $(install_sh) -c -m a+r {} {} \; \ || chmod -R a+r $(distdir) dist-gzip: distdir tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz $(am__remove_distdir) dist-bzip2: distdir tardir=$(distdir) && $(am__tar) | bzip2 -9 -c >$(distdir).tar.bz2 $(am__remove_distdir) dist-tarZ: distdir tardir=$(distdir) && $(am__tar) | compress -c >$(distdir).tar.Z $(am__remove_distdir) dist-shar: distdir shar $(distdir) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).shar.gz $(am__remove_distdir) dist-zip: distdir -rm -f $(distdir).zip zip -rq $(distdir).zip $(distdir) $(am__remove_distdir) dist dist-all: distdir tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz $(am__remove_distdir) # This target untars the dist file and tries a VPATH configuration. 
Then # it guarantees that the distribution is self-contained by making another # tarfile. distcheck: dist case '$(DIST_ARCHIVES)' in \ *.tar.gz*) \ GZIP=$(GZIP_ENV) gunzip -c $(distdir).tar.gz | $(am__untar) ;;\ *.tar.bz2*) \ bunzip2 -c $(distdir).tar.bz2 | $(am__untar) ;;\ *.tar.Z*) \ uncompress -c $(distdir).tar.Z | $(am__untar) ;;\ *.shar.gz*) \ GZIP=$(GZIP_ENV) gunzip -c $(distdir).shar.gz | unshar ;;\ *.zip*) \ unzip $(distdir).zip ;;\ esac chmod -R a-w $(distdir); chmod a+w $(distdir) mkdir $(distdir)/_build mkdir $(distdir)/_inst chmod a-w $(distdir) dc_install_base=`$(am__cd) $(distdir)/_inst && pwd | sed -e 's,^[^:\\/]:[\\/],/,'` \ && dc_destdir="$${TMPDIR-/tmp}/am-dc-$$$$/" \ && cd $(distdir)/_build \ && ../configure --srcdir=.. --prefix="$$dc_install_base" \ $(DISTCHECK_CONFIGURE_FLAGS) \ && $(MAKE) $(AM_MAKEFLAGS) \ && $(MAKE) $(AM_MAKEFLAGS) dvi \ && $(MAKE) $(AM_MAKEFLAGS) check \ && $(MAKE) $(AM_MAKEFLAGS) install \ && $(MAKE) $(AM_MAKEFLAGS) installcheck \ && $(MAKE) $(AM_MAKEFLAGS) uninstall \ && $(MAKE) $(AM_MAKEFLAGS) distuninstallcheck_dir="$$dc_install_base" \ distuninstallcheck \ && chmod -R a-w "$$dc_install_base" \ && ({ \ (cd ../.. && umask 077 && mkdir "$$dc_destdir") \ && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" install \ && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" uninstall \ && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" \ distuninstallcheck_dir="$$dc_destdir" distuninstallcheck; \ } || { rm -rf "$$dc_destdir"; exit 1; }) \ && rm -rf "$$dc_destdir" \ && $(MAKE) $(AM_MAKEFLAGS) dist \ && rm -rf $(DIST_ARCHIVES) \ && $(MAKE) $(AM_MAKEFLAGS) distcleancheck $(am__remove_distdir) @(echo "$(distdir) archives ready for distribution: "; \ list='$(DIST_ARCHIVES)'; for i in $$list; do echo $$i; done) | \ sed -e '1{h;s/./=/g;p;x;}' -e '$${p;x;}' distuninstallcheck: @cd $(distuninstallcheck_dir) \ && test `$(distuninstallcheck_listfiles) | wc -l` -le 1 \ || { echo "ERROR: files left after uninstall:" ; \ if test -n "$(DESTDIR)"; then \ echo " (check DESTDIR support)"; \ fi ; \ $(distuninstallcheck_listfiles) ; \ exit 1; } >&2 distcleancheck: distclean @if test '$(srcdir)' = . ; then \ echo "ERROR: distcleancheck can only run from a VPATH build" ; \ exit 1 ; \ fi @test `$(distcleancheck_listfiles) | wc -l` -eq 0 \ || { echo "ERROR: files left in build directory after distclean:" ; \ $(distcleancheck_listfiles) ; \ exit 1; } >&2 check-am: all-am check: check-recursive all-am: Makefile config.h installdirs: installdirs-recursive installdirs-am: install: install-recursive install-exec: install-exec-recursive install-data: install-data-recursive uninstall: uninstall-recursive install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-recursive install-strip: $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ `test -z '$(STRIP)' || \ echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
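# Illustrative maintainer workflow for the distribution targets defined above
# (the tarball name depends on $(PACKAGE) and $(VERSION); the commands are a
# hypothetical invocation, not additional rules):
#   make dist        # roll $(distdir).tar.gz from DISTFILES plus the SUBDIRS
#   make distcheck   # unpack that tarball, VPATH-build it in _build, run the
#                    # check/install/uninstall cycle, then re-roll the archive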
clean: clean-recursive clean-am: clean-generic mostlyclean-am distclean: distclean-recursive -rm -f $(am__CONFIG_DISTCLEAN_FILES) -rm -f Makefile distclean-am: clean-am distclean-generic distclean-hdr distclean-tags dvi: dvi-recursive dvi-am: html: html-recursive info: info-recursive info-am: install-data-am: install-exec-am: install-info: install-info-recursive install-man: installcheck-am: maintainer-clean: maintainer-clean-recursive -rm -f $(am__CONFIG_DISTCLEAN_FILES) -rm -rf $(top_srcdir)/autom4te.cache -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-recursive mostlyclean-am: mostlyclean-generic pdf: pdf-recursive pdf-am: ps: ps-recursive ps-am: uninstall-am: uninstall-info-am uninstall-info: uninstall-info-recursive .PHONY: $(RECURSIVE_TARGETS) CTAGS GTAGS all all-am am--refresh check \ check-am clean clean-generic clean-recursive ctags \ ctags-recursive dist dist-all dist-bzip2 dist-gzip dist-shar \ dist-tarZ dist-zip distcheck distclean distclean-generic \ distclean-hdr distclean-recursive distclean-tags \ distcleancheck distdir distuninstallcheck dvi dvi-am html \ html-am info info-am install install-am install-data \ install-data-am install-exec install-exec-am install-info \ install-info-am install-man install-strip installcheck \ installcheck-am installdirs installdirs-am maintainer-clean \ maintainer-clean-generic maintainer-clean-recursive \ mostlyclean mostlyclean-generic mostlyclean-recursive pdf \ pdf-am ps ps-am tags tags-recursive uninstall uninstall-am \ uninstall-info-am # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/configure.in0000644000000000000000000000131511672453175022373 0ustar rootroot# Process this file with autoconf to produce a configure script. AC_INIT(syck, 0.54) AC_CONFIG_AUX_DIR(config) AC_PREREQ(2.50) AM_INIT_AUTOMAKE(syck, 0.54) AM_CONFIG_HEADER(config.h) # Checks for programs. AC_PROG_CC_STDC AC_PROG_INSTALL AC_PROG_LN_S AC_PROG_RANLIB AC_PROG_MAKE_SET AC_PROG_AWK AC_PROG_YACC AM_PROG_LEX # Checks for libraries. # Checks for header files. AC_HEADER_STDC AC_CHECK_HEADERS(alloca.h stdlib.h string.h) AC_CHECK_SIZEOF(int, 4) AC_CHECK_SIZEOF(long, 4) # Checks for typedefs, structures, and compiler characteristics. # Checks for library functions. AC_FUNC_MALLOC AC_FUNC_VPRINTF AC_CONFIG_FILES([Makefile lib/Makefile tests/Makefile]) AC_OUTPUT ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/CHANGELOG0000644000000000000000000001355311672453175021303 0ustar rootroot--- %YAML:1.0 - version: 0.56 date: 2005-05-16 changes: ext/ruby/ext/syck/rubyext.c: - const_find, now locating class constants correctly. - YAML::Object class for loaded objects which have no corresponding class. - No anchors on simple strings. - Outputing of domain and private types and anchors properly. - Memory leak in mktime. lib/emitter.c: scale back seq-in-map on a typed seq. prevents the shift/reduce problem with the parser. - version: 0.55 date: 2005-04-14 changes: lib/emitter.c: - output inline collections. ensure proper quoting and no blocks in inline collections. - safe indentation of scalar blocks which include YAML document separators. lib/syck.h: styles for every type of node now. lib/gram.y: restructuring, trying to fix problem with typed seq-in-map shortcut nodes. lib/token.re: fixed parsing of inline seq closing char as closing map char and vice versa. 
ext/ruby/lib/yaml/rubytypes.rb: added styling of collections. to_yaml_style for every object in Ruby. ext/ruby/ext/syck/rubyext.c: ditto. - version: 0.54 date: 2005-04-07 changes: lib/emitter.c: scale back double-quoting of strings. get empty nulls working. lib/gram.y: allowing transfers, anchors on an empty. lib/implicit.re: removing 'y' and 'n' again!! lib/yaml.rb: repair YAML.transfer to work with taguris. lib/yaml/tag.rb: - allow taguri to be set using an accessor. - continue support of Object#to_yaml_type. ext/ruby/ext/syck/rubyext.c: - fixing PrivateType and DomainType objects. - new Resolver#tagurize converts type ids to taguris. (for backwards compatibility, since the new stuff uses strictly taguri.) - merging nobu's changes from Ruby trunk. ext/ruby/lib/yaml/rubytypes.rb: simplify taguri display. - version: 0.53 date: 2005-03-28 changes: README.EXT: more docs. ext/ruby/ext/syck/rubyext.c: - moved Object#yaml_new into the node_import and made it the default behavior. - the target_class is always called wih yaml_new, prepended a parameter, which is the klass. - loaded nodes through GenericResolver show their style. - YAML::Syck::Node#transform works. - bug with YAML::Syck::Emitter#level ext/ruby/lib/yaml.rb: reworking YAML::Stream to use the new emitter. ext/ruby/lib/yaml/stream.rb: ditto. ext/ruby/lib/yaml/rubytypes.rb: use of new yaml_new syntax. ext/ruby/lib/yaml/tag.rb: the tag_subclasses? method now shows up in the class. ext/ruby/yts/yts.rb: moved eval code out of the YAML module. ext/ruby/yts/*.yml: prepend YAML module onto all code that uses Stream, Omap, etc. lib/implicit.re: were 'y' and 'n' seriously omitted?? lib/node.c: added syck_seq_assign. - version: 0.52 date: 2005-03-27 changes: README.EXT: added detailed API docs for Syck!! ext/ruby/ext/syck/rubyext.c: consolidated all the diaspora of internal node types into the family below YAML::Syck::Node -- Map, Seq, Scalar -- all of whom are SyckNode structs pointing to Ruby data. lib/emitter.c: consolidated redundant block_styles struct into the scalar_style struct. (this means loaded nodes can now be sent back to emitter and preserve at least its very basic formatting.) lib/token.re: ditto. lib/node.c: new syck_replace_str methods and syck_empty_* methods for rewriting node contents, while keeping the ID and other setup info. lib/syck.h: reflect block_styles and new node functions. - version: 0.51 date: 2005-02-09 changes: tests/YTS.c: tests failing due to bad syck_emit_scalar call. ext/ruby/lib/yaml/rubytypes.rb: added Object#yaml_new. ext/ruby/ext/syck/rubyext.c: both yaml_new and yaml_initialize get called, should they be present. - version: 0.50 date: 2005-02-08 changes: lib/gram.y: freed up parser to accept any structure as a headless document! lib/emitter.c: renovated emitter, which allows an initial walk of the tree to check for anchors and tag prefixing. then, swift emission of nodes with folding routines and shortcut-checking built-in! ext/ruby/ext/syck/rubyext.c: added hooks for the new emitter. - version: 0.45 date: 2004-08-18 changes: CHANGELOG: Added. lib/Makefile.am: re2c compiling with bit vectors now. lib/syck.c: clear parser on init. thanks, ts. [ruby-core:02931] lib/implicit.re: - added sexagecimal float#base60. lib/token.re: - using newline_len to handline CR-LFs. "\000" was showing up on folded blocks which stopped at EOF. - buffer underflow. thanks, ts. [ruby-core:02929] - indentation absolutely ignored when processing flow collections. 
- plain scalars are trimmed if indentation follows in an ambiguous flow collection. - fixed problem with comments sharing line with opening of block sequence. ext/ruby/lib/yaml/compat.rb: moved all ruby 1.6 -> ruby 1.8 handling into compat.rb. ext/ruby/lib/yaml/baseemitter.rb: - simpler flow block code. - was forcing a mod value of zero at times, which kept some blocks from getting indentation. - double-quoted strings now are handled through the flow block code to increase readability. ext/ruby/lib/yaml/rubytypes.rb: - exceptions were using an older YAML.object_maker. [ruby-core:03080] - subtleties in emitting strings with opening whitespace. ext/ruby/lib/yaml.rb: - added YAML::load_file, YAML::parse_file. - added rdoc to beginning of lib. ext/ruby/ext/syck/rubyext.c: - set buffer after Data_Wrap_Struct to avoid possible GC. [ruby-talk:104835] - added float#base60 handling. ext/ruby/yts/YtsSpecificationExamples.yml: new examples from the 1.0 working draft. [http://yaml.org/spec/] ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/lib/0000755000000000000000000000000011672453175020630 5ustar rootrootruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/lib/bytecode.c0000644000000000000000000005306511672453175022603 0ustar rootroot/* Generated by re2c 0.9.3 on Tue Apr 12 20:34:14 2005 */ #line 1 "bytecode.re" /* * bytecode.re * * $Author: why $ * $Date: 2005/04/13 06:27:54 $ * * Copyright (C) 2003 why the lucky stiff */ #include "syck.h" #include "gram.h" #define QUOTELEN 128 /* * They do my bidding... */ #define YYCTYPE char #define YYCURSOR parser->cursor #define YYMARKER parser->marker #define YYLIMIT parser->limit #define YYTOKEN parser->token #define YYTOKTMP parser->toktmp #define YYLINEPTR parser->lineptr #define YYLINECTPTR parser->linectptr #define YYLINE parser->linect #define YYFILL(n) syck_parser_read(parser) extern SyckParser *syck_parser_ptr; char *get_inline( SyckParser *parser ); /* * Repositions the cursor at `n' offset from the token start. * Only works in `Header' and `Document' sections. */ #define YYPOS(n) YYCURSOR = YYTOKEN + n /* * Track line numbers */ #define CHK_NL(ptr) if ( *( ptr - 1 ) == '\n' && ptr > YYLINECTPTR ) { YYLINEPTR = ptr; YYLINE++; YYLINECTPTR = YYLINEPTR; } /* * I like seeing the level operations as macros... */ #define ADD_LEVEL(len, status) syck_parser_add_level( parser, len, status ) #define POP_LEVEL() syck_parser_pop_level( parser ) #define CURRENT_LEVEL() syck_parser_current_level( parser ) /* * Force a token next time around sycklex() */ #define FORCE_NEXT_TOKEN(tok) parser->force_token = tok; /* * Adding levels in bytecode requires us to make sure * we've got all our tokens worked out. */ #define ADD_BYTE_LEVEL(lvl, len, s ) \ switch ( lvl->status ) \ { \ case syck_lvl_seq: \ lvl->ncount++; \ ADD_LEVEL(len, syck_lvl_open); \ YYPOS(0); \ return '-'; \ \ case syck_lvl_map: \ lvl->ncount++; \ ADD_LEVEL(len, s); \ break; \ \ case syck_lvl_open: \ lvl->status = s; \ break; \ \ default: \ ADD_LEVEL(len, s); \ break; \ } /* * Nice little macro to ensure we're YAML_IOPENed to the current level. * * Only use this macro in the "Document" section * */ #define ENSURE_YAML_IOPEN(last_lvl, lvl_type, to_len, reset) \ if ( last_lvl->spaces < to_len ) \ { \ if ( last_lvl->status == syck_lvl_iseq || last_lvl->status == syck_lvl_imap ) \ { \ goto Document; \ } \ else \ { \ ADD_LEVEL( to_len, lvl_type ); \ if ( reset == 1 ) YYPOS(0); \ return YAML_IOPEN; \ } \ } /* * Nice little macro to ensure closure of levels. 
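 * If the current level is indented deeper than `to_len', the level is
 * popped, the cursor is rewound to the token start and YAML_IEND is
 * returned so the enclosing node is closed before parsing continues.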
* * Only use this macro in the "Document" section * */ #define ENSURE_YAML_IEND(last_lvl, to_len) \ if ( last_lvl->spaces > to_len ) \ { \ syck_parser_pop_level( parser ); \ YYPOS(0); \ return YAML_IEND; \ } /* * Concatenates string items and manages allocation * to the string */ #define CAT(s, c, i, l) \ { \ if ( i + 1 >= c ) \ { \ c += QUOTELEN; \ S_REALLOC_N( s, char, c ); \ } \ s[i++] = l; \ s[i] = '\0'; \ } /* * Parser for standard YAML Bytecode [UTF-8] */ int sycklex_bytecode_utf8( YYSTYPE *sycklval, SyckParser *parser ) { SyckLevel *lvl; int doc_level = 0; syck_parser_ptr = parser; if ( YYCURSOR == NULL ) { syck_parser_read( parser ); } if ( parser->force_token != 0 ) { int t = parser->force_token; parser->force_token = 0; return t; } #line 173 "bytecode.re" lvl = CURRENT_LEVEL(); if ( lvl->status == syck_lvl_doc ) { goto Document; } Header: YYTOKEN = YYCURSOR; #line 7 "" { YYCTYPE yych; unsigned int yyaccept; goto yy0; yy1: ++YYCURSOR; yy0: if((YYLIMIT - YYCURSOR) < 3) YYFILL(3); yych = *YYCURSOR; switch(yych){ case '\000': goto yy2; case 'D': goto yy3; default: goto yy5; } yy2: YYCURSOR = YYMARKER; switch(yyaccept){ case 0: goto yy4; } yy3: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case '\n': goto yy6; case '\r': goto yy8; default: goto yy4; } yy4: #line 200 "bytecode.re" { YYPOS(0); goto Document; } #line 37 "" yy5: yych = *++YYCURSOR; goto yy4; yy6: ++YYCURSOR; goto yy7; yy7: #line 187 "bytecode.re" { if ( lvl->status == syck_lvl_header ) { CHK_NL(YYCURSOR); goto Directive; } else { ENSURE_YAML_IEND(lvl, -1); YYPOS(0); return 0; } } #line 56 "" yy8: ++YYCURSOR; switch((yych = *YYCURSOR)) { case '\n': goto yy6; default: goto yy2; } } #line 204 "bytecode.re" Document: { lvl = CURRENT_LEVEL(); if ( lvl->status == syck_lvl_header ) { lvl->status = syck_lvl_doc; } YYTOKEN = YYCURSOR; #line 65 "" { YYCTYPE yych; unsigned int yyaccept; goto yy9; yy10: ++YYCURSOR; yy9: if((YYLIMIT - YYCURSOR) < 3) YYFILL(3); yych = *YYCURSOR; switch(yych){ case '\000': goto yy30; case '\n': goto yy27; case '\r': goto yy29; case 'A': goto yy19; case 'D': goto yy12; case 'E': goto yy16; case 'M': goto yy14; case 'P': goto yy13; case 'Q': goto yy15; case 'R': goto yy21; case 'S': goto yy17; case 'T': goto yy23; case 'c': goto yy25; default: goto yy11; } yy11:yy12: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy41; case '\r': goto yy44; default: goto yy11; } yy13: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy41; case '\r': goto yy43; default: goto yy11; } yy14: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy38; case '\r': goto yy40; default: goto yy11; } yy15: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy35; case '\r': goto yy37; default: goto yy11; } yy16: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy32; case '\r': goto yy34; default: goto yy11; } yy17: ++YYCURSOR; goto yy18; yy18: #line 289 "bytecode.re" { ADD_BYTE_LEVEL(lvl, lvl->spaces + 1, syck_lvl_str); goto Scalar; } #line 127 "" yy19: ++YYCURSOR; goto yy20; yy20: #line 293 "bytecode.re" { ADD_BYTE_LEVEL(lvl, lvl->spaces + 1, syck_lvl_open); sycklval->name = get_inline( parser ); syck_hdlr_remove_anchor( parser, sycklval->name ); CHK_NL(YYCURSOR); return YAML_ANCHOR; } #line 138 "" yy21: ++YYCURSOR; goto yy22; yy22: #line 300 "bytecode.re" { ADD_BYTE_LEVEL(lvl, lvl->spaces + 1, syck_lvl_str); sycklval->name = get_inline( parser ); POP_LEVEL(); if ( *( YYCURSOR - 1 ) == '\n' ) YYCURSOR--; return YAML_ALIAS; } #line 149 "" yy23: ++YYCURSOR; goto yy24; yy24: #line 307 "bytecode.re" { char *qstr; 
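        /* 'T' bytecode (transfer method): read the inline tag text.  A
           leading '!' yields YAML_ITRANSFER or YAML_TRANSFER, with '^'
           marking where the current level's domain is spliced in; any
           other text is returned verbatim as YAML_TAGURI. */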
ADD_BYTE_LEVEL(lvl, lvl->spaces + 1, syck_lvl_open); qstr = get_inline( parser ); CHK_NL(YYCURSOR); if ( qstr[0] == '!' ) { int qidx = strlen( qstr ); if ( qstr[1] == '\0' ) { free( qstr ); return YAML_ITRANSFER; } lvl = CURRENT_LEVEL(); /* * URL Prefixing */ if ( qstr[1] == '^' ) { sycklval->name = S_ALLOC_N( char, qidx + strlen( lvl->domain ) ); sycklval->name[0] = '\0'; strcat( sycklval->name, lvl->domain ); strncat( sycklval->name, qstr + 2, qidx - 2 ); free( qstr ); } else { char *carat = qstr + 1; char *qend = qstr + qidx; while ( (++carat) < qend ) { if ( *carat == '^' ) break; } if ( carat < qend ) { free( lvl->domain ); lvl->domain = syck_strndup( qstr + 1, carat - ( qstr + 1 ) ); sycklval->name = S_ALLOC_N( char, ( qend - carat ) + strlen( lvl->domain ) ); sycklval->name[0] = '\0'; strcat( sycklval->name, lvl->domain ); strncat( sycklval->name, carat + 1, ( qend - carat ) - 1 ); free( qstr ); } else { sycklval->name = S_ALLOC_N( char, strlen( qstr ) ); sycklval->name[0] = '\0'; S_MEMCPY( sycklval->name, qstr + 1, char, strlen( qstr ) ); free( qstr ); } } return YAML_TRANSFER; } sycklval->name = qstr; return YAML_TAGURI; } #line 213 "" yy25: ++YYCURSOR; goto yy26; yy26: #line 367 "bytecode.re" { goto Comment; } #line 219 "" yy27: ++YYCURSOR; goto yy28; yy28: #line 369 "bytecode.re" { CHK_NL(YYCURSOR); if ( lvl->status == syck_lvl_seq ) { return YAML_INDENT; } else if ( lvl->status == syck_lvl_map ) { if ( lvl->ncount % 2 == 1 ) return ':'; else return YAML_INDENT; } goto Document; } #line 236 "" yy29: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy27; default: goto yy11; } yy30: ++YYCURSOR; goto yy31; yy31: #line 382 "bytecode.re" { ENSURE_YAML_IEND(lvl, -1); YYPOS(0); return 0; } #line 250 "" yy32: ++YYCURSOR; goto yy33; yy33: #line 253 "bytecode.re" { if ( lvl->status == syck_lvl_seq && lvl->ncount == 0 ) { lvl->ncount++; YYPOS(0); FORCE_NEXT_TOKEN( ']' ); return '['; } else if ( lvl->status == syck_lvl_map && lvl->ncount == 0 ) { lvl->ncount++; YYPOS(0); FORCE_NEXT_TOKEN( '}' ); return '{'; } POP_LEVEL(); lvl = CURRENT_LEVEL(); if ( lvl->status == syck_lvl_seq ) { FORCE_NEXT_TOKEN(YAML_INDENT); } else if ( lvl->status == syck_lvl_map ) { if ( lvl->ncount % 2 == 1 ) { FORCE_NEXT_TOKEN(':'); } else { FORCE_NEXT_TOKEN(YAML_INDENT); } } CHK_NL(YYCURSOR); return YAML_IEND; } #line 290 "" yy34: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy32; default: goto yy11; } yy35: ++YYCURSOR; goto yy36; yy36: #line 238 "bytecode.re" { int complex = 0; if ( lvl->ncount % 2 == 0 && ( lvl->status == syck_lvl_map || lvl->status == syck_lvl_seq ) ) { complex = 1; } ADD_BYTE_LEVEL(lvl, lvl->spaces + 1, syck_lvl_seq); CHK_NL(YYCURSOR); if ( complex ) { FORCE_NEXT_TOKEN( YAML_IOPEN ); return '?'; } return YAML_IOPEN; } #line 314 "" yy37: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy35; default: goto yy11; } yy38: ++YYCURSOR; goto yy39; yy39: #line 223 "bytecode.re" { int complex = 0; if ( lvl->ncount % 2 == 0 && ( lvl->status == syck_lvl_map || lvl->status == syck_lvl_seq ) ) { complex = 1; } ADD_BYTE_LEVEL(lvl, lvl->spaces + 1, syck_lvl_map); CHK_NL(YYCURSOR); if ( complex ) { FORCE_NEXT_TOKEN( YAML_IOPEN ); return '?'; } return YAML_IOPEN; } #line 338 "" yy40: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy38; default: goto yy11; } yy41: ++YYCURSOR; goto yy42; yy42: #line 218 "bytecode.re" { ENSURE_YAML_IEND(lvl, -1); YYPOS(0); return 0; } #line 352 "" yy43: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy41; default: goto yy11; } yy44: ++YYCURSOR; switch((yych = 
*YYCURSOR)) { case '\n': goto yy41; default: goto yy11; } } #line 387 "bytecode.re" } Directive: { YYTOKEN = YYCURSOR; #line 366 "" { YYCTYPE yych; unsigned int yyaccept; goto yy45; yy46: ++YYCURSOR; yy45: if((YYLIMIT - YYCURSOR) < 2) YYFILL(2); yych = *YYCURSOR; switch(yych){ case '\000': goto yy47; case 'V': goto yy48; default: goto yy50; } yy47: YYCURSOR = YYMARKER; switch(yyaccept){ case 0: goto yy49; } yy48: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case '.': case '/': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case ':': case ';': case '<': case '=': case '>': case '?': case '@': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '[': case '\\': case ']': case '^': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy51; default: goto yy49; } yy49: #line 400 "bytecode.re" { YYCURSOR = YYTOKEN; return YAML_DOCSEP; } #line 469 "" yy50: yych = *++YYCURSOR; goto yy49; yy51: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy52; yy52: switch(yych){ case '.': case '/': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case ';': case '<': case '=': case '>': case '?': case '@': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '[': case '\\': case ']': case '^': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy51; case ':': goto yy53; default: goto yy47; } yy53: yych = *++YYCURSOR; switch(yych){ case '.': case '/': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case ':': case ';': case '<': case '=': case '>': case '?': case '@': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '[': case '\\': case ']': case '^': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy54; default: goto yy47; } yy54: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy55; yy55: switch(yych){ case '\n': goto yy56; case '\r': goto yy58; case '.': case '/': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case ':': case ';': case '<': case '=': case '>': case '?': case '@': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': 
case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '[': case '\\': case ']': case '^': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy54; default: goto yy47; } yy56: ++YYCURSOR; goto yy57; yy57: #line 397 "bytecode.re" { CHK_NL(YYCURSOR); goto Directive; } #line 724 "" yy58: ++YYCURSOR; switch((yych = *YYCURSOR)) { case '\n': goto yy56; default: goto yy47; } } #line 403 "bytecode.re" } Comment: { YYTOKEN = YYCURSOR; #line 733 "" { YYCTYPE yych; unsigned int yyaccept; goto yy59; yy60: ++YYCURSOR; yy59: if((YYLIMIT - YYCURSOR) < 2) YYFILL(2); yych = *YYCURSOR; switch(yych){ case '\000': goto yy61; case '\n': goto yy62; case '\r': goto yy64; default: goto yy66; } yy61:yy62: ++YYCURSOR; goto yy63; yy63: #line 413 "bytecode.re" { CHK_NL(YYCURSOR); goto Document; } #line 754 "" yy64: ++YYCURSOR; switch((yych = *YYCURSOR)) { case '\n': goto yy67; default: goto yy65; } yy65: #line 416 "bytecode.re" { goto Comment; } #line 763 "" yy66: yych = *++YYCURSOR; goto yy65; yy67: ++YYCURSOR; yych = *YYCURSOR; goto yy63; } #line 418 "bytecode.re" } Scalar: { int idx = 0; int cap = 100; char *str = S_ALLOC_N( char, cap ); char *tok; str[0] = '\0'; Scalar2: tok = YYCURSOR; #line 771 "" { YYCTYPE yych; unsigned int yyaccept; goto yy68; yy69: ++YYCURSOR; yy68: if((YYLIMIT - YYCURSOR) < 3) YYFILL(3); yych = *YYCURSOR; switch(yych){ case '\000': goto yy74; case '\n': goto yy70; case '\r': goto yy72; default: goto yy76; } yy70: ++YYCURSOR; switch((yych = *YYCURSOR)) { case 'C': goto yy78; case 'N': goto yy80; case 'Z': goto yy83; default: goto yy71; } yy71: #line 462 "bytecode.re" { YYCURSOR = tok; goto ScalarEnd; } #line 798 "" yy72: ++YYCURSOR; switch((yych = *YYCURSOR)) { case '\n': goto yy77; default: goto yy73; } yy73: #line 470 "bytecode.re" { CAT(str, cap, idx, tok[0]); goto Scalar2; } #line 809 "" yy74: ++YYCURSOR; goto yy75; yy75: #line 466 "bytecode.re" { YYCURSOR = tok; goto ScalarEnd; } #line 817 "" yy76: yych = *++YYCURSOR; goto yy73; yy77: yych = *++YYCURSOR; switch(yych){ case 'C': goto yy78; case 'N': goto yy80; case 'Z': goto yy83; default: goto yy71; } yy78: ++YYCURSOR; goto yy79; yy79: #line 436 "bytecode.re" { CHK_NL(tok+1); goto Scalar2; } #line 833 "" yy80: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy81; yy81: switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy80; default: goto yy82; } yy82: #line 439 "bytecode.re" { CHK_NL(tok+1); if ( tok + 2 < YYCURSOR ) { char *count = tok + 2; int total = strtod( count, NULL ); int i; for ( i = 0; i < total; i++ ) { CAT(str, cap, idx, '\n'); } } else { CAT(str, cap, idx, '\n'); } goto Scalar2; } #line 871 "" yy83: ++YYCURSOR; goto yy84; yy84: #line 457 "bytecode.re" { CHK_NL(tok+1); CAT(str, cap, idx, '\0'); goto Scalar2; } #line 880 "" } #line 474 "bytecode.re" ScalarEnd: { SyckNode *n = syck_alloc_str(); n->data.str->ptr = str; n->data.str->len = idx; sycklval->nodeData = n; POP_LEVEL(); if ( parser->implicit_typing == 1 ) { try_tag_implicit( sycklval->nodeData, parser->taguri_expansion ); } return YAML_PLAIN; } } } char * get_inline( SyckParser *parser ) { int idx = 0; int cap = 100; char *str = 
S_ALLOC_N( char, cap ); char *tok; str[0] = '\0'; Inline: { tok = YYCURSOR; #line 884 "" { YYCTYPE yych; unsigned int yyaccept; goto yy85; yy86: ++YYCURSOR; yy85: if((YYLIMIT - YYCURSOR) < 2) YYFILL(2); yych = *YYCURSOR; switch(yych){ case '\000': goto yy91; case '\n': goto yy87; case '\r': goto yy89; default: goto yy93; } yy87: ++YYCURSOR; goto yy88; yy88: #line 509 "bytecode.re" { CHK_NL(YYCURSOR); return str; } #line 905 "" yy89: ++YYCURSOR; switch((yych = *YYCURSOR)) { case '\n': goto yy94; default: goto yy90; } yy90: #line 516 "bytecode.re" { CAT(str, cap, idx, tok[0]); goto Inline; } #line 916 "" yy91: ++YYCURSOR; goto yy92; yy92: #line 512 "bytecode.re" { YYCURSOR = tok; return str; } #line 924 "" yy93: yych = *++YYCURSOR; goto yy90; yy94: ++YYCURSOR; yych = *YYCURSOR; goto yy88; } #line 520 "bytecode.re" } } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/lib/implicit.re0000644000000000000000000001254411672453175023000 0ustar rootroot/* * implicit.re * * $Author: why $ * $Date: 2005/04/06 17:18:59 $ * * Copyright (C) 2003 why the lucky stiff */ #include "syck.h" #define YYCTYPE char #define YYCURSOR cursor #define YYMARKER marker #define YYLIMIT limit #define YYFILL(n) void try_tag_implicit( SyckNode *n, int taguri ) { char *tid = ""; switch ( n->kind ) { case syck_str_kind: tid = syck_match_implicit( n->data.str->ptr, n->data.str->len ); break; case syck_seq_kind: tid = "seq"; break; case syck_map_kind: tid = "map"; break; } if ( n->type_id != NULL ) S_FREE( n->type_id ); if ( taguri == 1 ) { n->type_id = syck_taguri( YAML_DOMAIN, tid, strlen( tid ) ); } else { n->type_id = syck_strndup( tid, strlen( tid ) ); } } char *syck_match_implicit( char *str, size_t len ) { char *cursor, *limit, *marker; cursor = str; limit = str + len; /*!re2c NULL = [\000] ; ANY = [\001-\377] ; DIGIT = [0-9] ; DIGITSC = [0-9,] ; DIGITSP = [0-9.] ; YEAR = DIGIT DIGIT DIGIT DIGIT ; MON = DIGIT DIGIT ; SIGN = [-+] ; HEX = [0-9a-fA-F,] ; OCT = [0-7,] ; INTHEX = SIGN? "0x" HEX+ ; INTOCT = SIGN? "0" OCT+ ; INTSIXTY = SIGN? DIGIT DIGITSC* ( ":" [0-5]? DIGIT )+ ; INTCANON = SIGN? ( "0" | [1-9] DIGITSC* ) ; FLOATFIX = SIGN? DIGIT DIGITSC* "." DIGITSC* ; FLOATEXP = SIGN? DIGIT DIGITSC* "." DIGITSP* [eE] SIGN DIGIT+ ; FLOATSIXTY = SIGN? DIGIT DIGITSC* ( ":" [0-5]? DIGIT )+ "." DIGITSC* ; INF = ( "inf" | "Inf" | "INF" ) ; FLOATINF = [+]? "." INF ; FLOATNEGINF = [-] "." INF ; FLOATNAN = "." ( "nan" | "NaN" | "NAN" ) ; NULLTYPE = ( "~" | "null" | "Null" | "NULL" )? ; BOOLYES = ( "yes" | "Yes" | "YES" | "true" | "True" | "TRUE" | "on" | "On" | "ON" ) ; BOOLNO = ( "no" | "No" | "NO" | "false" | "False" | "FALSE" | "off" | "Off" | "OFF" ) ; TIMEZ = ( "Z" | [-+] DIGIT DIGIT ( ":" DIGIT DIGIT )? ) ; TIMEYMD = YEAR "-" MON "-" MON ; TIMEISO = YEAR "-" MON "-" MON [Tt] MON ":" MON ":" MON ( "." DIGIT* )? TIMEZ ; TIMESPACED = YEAR "-" MON "-" MON [ \t]+ MON ":" MON ":" MON ( "." DIGIT* )? [ \t]+ TIMEZ ; TIMECANON = YEAR "-" MON "-" MON "T" MON ":" MON ":" MON ( "." DIGIT* [1-9]+ )? 
"Z" ; MERGE = "<<" ; DEFAULTKEY = "=" ; NULLTYPE NULL { return "null"; } BOOLYES NULL { return "bool#yes"; } BOOLNO NULL { return "bool#no"; } INTHEX NULL { return "int#hex"; } INTOCT NULL { return "int#oct"; } INTSIXTY NULL { return "int#base60"; } INTCANON NULL { return "int"; } FLOATFIX NULL { return "float#fix"; } FLOATEXP NULL { return "float#exp"; } FLOATSIXTY NULL { return "float#base60"; } FLOATINF NULL { return "float#inf"; } FLOATNEGINF NULL { return "float#neginf"; } FLOATNAN NULL { return "float#nan"; } TIMEYMD NULL { return "timestamp#ymd"; } TIMEISO NULL { return "timestamp#iso8601"; } TIMESPACED NULL { return "timestamp#spaced"; } TIMECANON NULL { return "timestamp"; } DEFAULTKEY NULL { return "default"; } MERGE NULL { return "merge"; } ANY { return "str"; } */ } /* Remove ending fragment and compare types */ int syck_tagcmp( char *tag1, char *tag2 ) { if ( tag1 == tag2 ) return 1; if ( tag1 == NULL || tag2 == NULL ) return 0; else { int i; char *othorpe; char *tmp1 = syck_strndup( tag1, strlen( tag1 ) ); char *tmp2 = syck_strndup( tag2, strlen( tag2 ) ); othorpe = strstr( tmp1, "#" ); if ( othorpe != NULL ) { othorpe[0] = '\0'; } othorpe = strstr( tmp2, "#" ); if ( othorpe != NULL ) { othorpe[0] = '\0'; } i = strcmp( tmp1, tmp2 ); S_FREE( tmp1 ); S_FREE( tmp2 ); return i; } } char * syck_type_id_to_uri( char *type_id ) { char *cursor, *limit, *marker; cursor = type_id; limit = type_id + strlen( type_id ); /*!re2c TAG = "tag" ; XPRIVATE = "x-private" ; WD = [A-Za-z0-9_] ; WDD = [A-Za-z0-9_-] ; DNSCOMPRE = WD ( WDD* WD )? ; DNSNAMERE = ( ( DNSCOMPRE "." )+ DNSCOMPRE | DNSCOMPRE ) ; TAGDATE = YEAR ( "-" MON )? ( "-" MON )? ; TAG ":" DNSNAMERE "," TAGDATE ":" { return type_id; } XPRIVATE ":" { return type_id; } "!" { return syck_xprivate( type_id + 1, strlen( type_id ) - 1 ); } DNSNAMERE "/" { char *domain = S_ALLOC_N( char, ( YYCURSOR - type_id ) + 15 ); char *uri; domain[0] = '\0'; strncat( domain, type_id, ( YYCURSOR - type_id ) - 1 ); strcat( domain, "." ); strcat( domain, YAML_DOMAIN ); uri = syck_taguri( domain, YYCURSOR, YYLIMIT - YYCURSOR ); S_FREE( domain ); return uri; } DNSNAMERE "," TAGDATE "/" { char *domain = S_ALLOC_N( char, YYCURSOR - type_id ); char *uri; domain[0] = '\0'; strncat( domain, type_id, ( YYCURSOR - type_id ) - 1 ); uri = syck_taguri( domain, YYCURSOR, YYLIMIT - YYCURSOR ); S_FREE( domain ); return uri; } ANY { return syck_taguri( YAML_DOMAIN, type_id, strlen( type_id ) ); } */ } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/lib/token.re0000644000000000000000000010127611672453175022307 0ustar rootroot/* * token.re * * $Author: why $ * $Date: 2005/04/13 06:27:54 $ * * Copyright (C) 2003 why the lucky stiff */ #include "syck.h" #include "gram.h" /* * Allocate quoted strings in chunks */ #define QUOTELEN 1024 /* * They do my bidding... */ #define YYCTYPE char #define YYCURSOR parser->cursor #define YYMARKER parser->marker #define YYLIMIT parser->limit #define YYTOKEN parser->token #define YYTOKTMP parser->toktmp #define YYLINEPTR parser->lineptr #define YYLINECTPTR parser->linectptr #define YYLINE parser->linect #define YYFILL(n) syck_parser_read(parser) /* * Repositions the cursor at `n' offset from the token start. * Only works in `Header' and `Document' sections. */ #define YYPOS(n) YYCURSOR = YYTOKEN + n /* * Track line numbers */ #define NEWLINE(ptr) YYLINEPTR = ptr + newline_len(ptr); if ( YYLINEPTR > YYLINECTPTR ) { YYLINE++; YYLINECTPTR = YYLINEPTR; } /* * I like seeing the level operations as macros... 
*/ #define ADD_LEVEL(len, status) syck_parser_add_level( parser, len, status ) #define POP_LEVEL() syck_parser_pop_level( parser ) #define CURRENT_LEVEL() syck_parser_current_level( parser ) /* * Force a token next time around sycklex() */ #define FORCE_NEXT_TOKEN(tok) parser->force_token = tok; /* * Nice little macro to ensure we're YAML_IOPENed to the current level. * * Only use this macro in the "Document" section * */ #define ENSURE_YAML_IOPEN(last_lvl, to_len, reset) \ if ( last_lvl->spaces < to_len ) \ { \ if ( last_lvl->status == syck_lvl_iseq || last_lvl->status == syck_lvl_imap ) \ { \ goto Document; \ } \ else \ { \ ADD_LEVEL( to_len, syck_lvl_doc ); \ if ( reset == 1 ) YYPOS(0); \ return YAML_IOPEN; \ } \ } /* * Nice little macro to ensure closure of levels. * * Only use this macro in the "Document" section * */ #define ENSURE_YAML_IEND(last_lvl, to_len) \ if ( last_lvl->spaces > to_len ) \ { \ syck_parser_pop_level( parser ); \ YYPOS(0); \ return YAML_IEND; \ } /* * Concatenates quoted string items and manages allocation * to the quoted string */ #define QUOTECAT(s, c, i, l) \ { \ if ( i + 1 >= c ) \ { \ c += QUOTELEN; \ S_REALLOC_N( s, char, c ); \ } \ s[i++] = l; \ s[i] = '\0'; \ } #define QUOTECATS(s, c, i, cs, cl) \ { \ while ( i + cl >= c ) \ { \ c += QUOTELEN; \ S_REALLOC_N( s, char, c ); \ } \ S_MEMCPY( s + i, cs, char, cl ); \ i += cl; \ s[i] = '\0'; \ } /* * Tags a plain scalar with a transfer method * * Use only in "Plain" section * */ #define RETURN_IMPLICIT() \ { \ SyckNode *n = syck_alloc_str(); \ YYCURSOR = YYTOKEN; \ n->data.str->ptr = qstr; \ n->data.str->len = qidx; \ n->data.str->style = scalar_plain; \ sycklval->nodeData = n; \ if ( parser->implicit_typing == 1 ) \ { \ try_tag_implicit( sycklval->nodeData, parser->taguri_expansion ); \ } \ return YAML_PLAIN; \ } /* concat the inline characters to the plain scalar */ #define PLAIN_NOT_INL() \ if ( *(YYCURSOR - 1) == ' ' || is_newline( YYCURSOR - 1 ) ) \ { \ YYCURSOR--; \ } \ QUOTECATS(qstr, qcapa, qidx, YYTOKEN, YYCURSOR - YYTOKEN); \ goto Plain2; /* trim spaces off the end in case of indent */ #define PLAIN_IS_INL() \ char *walker = qstr + qidx - 1; \ while ( walker > qstr && ( *walker == '\n' || *walker == ' ' ) ) \ { \ qidx--; \ walker[0] = '\0'; \ walker--; \ } /* * Keep or chomp block? * * Use only in "ScalarBlock" section * */ #define RETURN_YAML_BLOCK() \ { \ SyckNode *n = syck_alloc_str(); \ if ( ((SyckParser *)parser)->taguri_expansion == 1 ) \ { \ n->type_id = syck_taguri( YAML_DOMAIN, "str", 3 ); \ } \ else \ { \ n->type_id = syck_strndup( "str", 3 ); \ } \ n->data.str->ptr = qstr; \ n->data.str->len = qidx; \ if ( blockType == BLOCK_LIT ) { \ n->data.str->style = scalar_literal; \ } else { \ n->data.str->style = scalar_fold; \ } \ if ( qidx > 0 ) \ { \ if ( nlDoWhat != NL_KEEP ) \ { \ char *fc = n->data.str->ptr + n->data.str->len - 1; \ while ( is_newline( fc ) ) fc--; \ if ( nlDoWhat != NL_CHOMP && fc < n->data.str->ptr + n->data.str->len - 1 ) \ fc += 1; \ n->data.str->len = fc - n->data.str->ptr + 1; \ } \ } \ sycklval->nodeData = n; \ return YAML_BLOCK; \ } /* * Handles newlines, calculates indent */ #define GOBBLE_UP_YAML_INDENT( ict, start ) \ char *indent = start; \ NEWLINE(indent); \ while ( indent < YYCURSOR ) \ { \ if ( is_newline( ++indent ) ) \ { \ NEWLINE(indent); \ } \ } \ ict = 0; \ if ( *YYCURSOR == '\0' ) \ { \ ict = -1; \ start = YYCURSOR - 1; \ } \ else if ( *YYLINEPTR == ' ' ) \ { \ ict = YYCURSOR - YYLINEPTR; \ } /* * If an indent exists at the current level, back up. 
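 * GET_TRUE_YAML_INDENT reports the current level's indentation, except when
 * the token starts exactly at that column, in which case the enclosing
 * level's indentation is used instead.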
*/ #define GET_TRUE_YAML_INDENT(indt_len) \ { \ SyckLevel *lvl_deep = CURRENT_LEVEL(); \ indt_len = lvl_deep->spaces; \ if ( indt_len == YYTOKEN - YYLINEPTR ) \ { \ SyckLevel *lvl_over; \ parser->lvl_idx--; \ lvl_over = CURRENT_LEVEL(); \ indt_len = lvl_over->spaces; \ parser->lvl_idx++; \ } \ } /* * Argjh! I hate globals! Here for syckerror() only! */ SyckParser *syck_parser_ptr = NULL; /* * Accessory funcs later in this file. */ void eat_comments( SyckParser * ); char escape_seq( char ); int is_newline( char *ptr ); int newline_len( char *ptr ); int sycklex_yaml_utf8( YYSTYPE *, SyckParser * ); int sycklex_bytecode_utf8( YYSTYPE *, SyckParser * ); int syckwrap(); /* * My own re-entrant sycklex() using re2c. * You really get used to the limited regexp. * It's really nice to not rely on backtracking and such. */ int sycklex( YYSTYPE *sycklval, SyckParser *parser ) { switch ( parser->input_type ) { case syck_yaml_utf8: return sycklex_yaml_utf8( sycklval, parser ); case syck_yaml_utf16: syckerror( "UTF-16 is not currently supported in Syck.\nPlease contribute code to help this happen!" ); break; case syck_yaml_utf32: syckerror( "UTF-32 is not currently supported in Syck.\nPlease contribute code to help this happen!" ); break; case syck_bytecode_utf8: return sycklex_bytecode_utf8( sycklval, parser ); } } /* * Parser for standard YAML [UTF-8] */ int sycklex_yaml_utf8( YYSTYPE *sycklval, SyckParser *parser ) { int doc_level = 0; syck_parser_ptr = parser; if ( YYCURSOR == NULL ) { syck_parser_read( parser ); } if ( parser->force_token != 0 ) { int t = parser->force_token; parser->force_token = 0; return t; } /*!re2c YWORDC = [A-Za-z0-9_-] ; YWORDP = [A-Za-z0-9_-\.] ; LF = ( "\n" | "\r\n" ) ; SPC = " " ; ENDSPC = ( SPC+ | LF ); YINDENT = LF ( SPC | LF )* ; NULL = [\000] ; ANY = [\001-\377] ; ISEQO = "[" ; ISEQC = "]" ; IMAPO = "{" ; IMAPC = "}" ; CDELIMS = ( ISEQC | IMAPC ) ; ICOMMA = ( "," ENDSPC ) ; ALLX = ( ":" ENDSPC ) ; DIR = "%" YWORDP+ ":" YWORDP+ ; YBLOCK = [>|] [-+0-9]* ENDSPC ; HEX = [0-9A-Fa-f] ; ESCSEQ = ["\\abefnrtv0] ; */ if ( YYLINEPTR != YYCURSOR ) { goto Document; } Header: YYTOKEN = YYCURSOR; /*!re2c "---" ENDSPC { SyckLevel *lvl = CURRENT_LEVEL(); if ( lvl->status == syck_lvl_header ) { YYPOS(3); goto Directive; } else { ENSURE_YAML_IEND(lvl, -1); YYPOS(0); return 0; } } "..." 
ENDSPC { SyckLevel *lvl = CURRENT_LEVEL(); if ( lvl->status == syck_lvl_header ) { goto Header; } else { ENSURE_YAML_IEND(lvl, -1); YYPOS(0); return 0; } return 0; } "#" { eat_comments( parser ); goto Header; } NULL { SyckLevel *lvl = CURRENT_LEVEL(); ENSURE_YAML_IEND(lvl, -1); YYPOS(0); return 0; } YINDENT { GOBBLE_UP_YAML_INDENT( doc_level, YYTOKEN ); goto Header; } SPC+ { doc_level = YYCURSOR - YYLINEPTR; goto Header; } ANY { YYPOS(0); goto Document; } */ Document: { SyckLevel *lvl = CURRENT_LEVEL(); if ( lvl->status == syck_lvl_header ) { lvl->status = syck_lvl_doc; } YYTOKEN = YYCURSOR; /*!re2c YINDENT { /* Isolate spaces */ int indt_len; GOBBLE_UP_YAML_INDENT( indt_len, YYTOKEN ); lvl = CURRENT_LEVEL(); doc_level = 0; /* XXX: Comment lookahead */ if ( *YYCURSOR == '#' ) { goto Document; } /* Ignore indentation inside inlines */ if ( lvl->status == syck_lvl_iseq || lvl->status == syck_lvl_imap ) { goto Document; } /* Check for open indent */ ENSURE_YAML_IEND(lvl, indt_len); ENSURE_YAML_IOPEN(lvl, indt_len, 0); if ( indt_len == -1 ) { return 0; } return YAML_INDENT; } ISEQO { ENSURE_YAML_IOPEN(lvl, doc_level, 1); lvl = CURRENT_LEVEL(); ADD_LEVEL(lvl->spaces + 1, syck_lvl_iseq); return YYTOKEN[0]; } IMAPO { ENSURE_YAML_IOPEN(lvl, doc_level, 1); lvl = CURRENT_LEVEL(); ADD_LEVEL(lvl->spaces + 1, syck_lvl_imap); return YYTOKEN[0]; } CDELIMS { POP_LEVEL(); return YYTOKEN[0]; } [:,] ENDSPC { YYPOS(1); return YYTOKEN[0]; } [-?] ENDSPC { ENSURE_YAML_IOPEN(lvl, YYTOKEN - YYLINEPTR, 1); FORCE_NEXT_TOKEN(YAML_IOPEN); if ( *YYCURSOR == '#' || is_newline( YYCURSOR ) || is_newline( YYCURSOR - 1 ) ) { YYCURSOR--; ADD_LEVEL((YYTOKEN + 1) - YYLINEPTR, syck_lvl_doc); } else /* spaces followed by content uses the space as indentation */ { ADD_LEVEL(YYCURSOR - YYLINEPTR, syck_lvl_doc); } return YYTOKEN[0]; } "&" YWORDC+ { sycklval->name = syck_strndup( YYTOKEN + 1, YYCURSOR - YYTOKEN - 1 ); /* * Remove previous anchors of the same name. Since the parser will likely * construct deeper nodes first, we want those nodes to be placed in the * queue for matching at a higher level of indentation. */ syck_hdlr_remove_anchor(parser, sycklval->name); return YAML_ANCHOR; } "*" YWORDC+ { ENSURE_YAML_IOPEN(lvl, doc_level, 1); sycklval->name = syck_strndup( YYTOKEN + 1, YYCURSOR - YYTOKEN - 1 ); return YAML_ALIAS; } "!" 
{ goto TransferMethod; } "'" { ENSURE_YAML_IOPEN(lvl, doc_level, 1); goto SingleQuote; } "\"" { ENSURE_YAML_IOPEN(lvl, doc_level, 1); goto DoubleQuote; } YBLOCK { if ( is_newline( YYCURSOR - 1 ) ) { YYCURSOR--; } goto ScalarBlock; } "#" { eat_comments( parser ); goto Document; } SPC+ { goto Document; } NULL { ENSURE_YAML_IEND(lvl, -1); YYPOS(0); return 0; } ANY { ENSURE_YAML_IOPEN(lvl, doc_level, 1); goto Plain; } */ } Directive: { YYTOKTMP = YYCURSOR; /*!re2c DIR { goto Directive; } SPC+ { goto Directive; } ANY { YYCURSOR = YYTOKTMP; return YAML_DOCSEP; } */ } Plain: { int qidx = 0; int qcapa = 100; char *qstr = S_ALLOC_N( char, qcapa ); SyckLevel *plvl; int parentIndent; YYCURSOR = YYTOKEN; plvl = CURRENT_LEVEL(); GET_TRUE_YAML_INDENT(parentIndent); Plain2: YYTOKEN = YYCURSOR; Plain3: /*!re2c YINDENT { int indt_len, nl_count = 0; SyckLevel *lvl; char *tok = YYTOKEN; GOBBLE_UP_YAML_INDENT( indt_len, tok ); lvl = CURRENT_LEVEL(); if ( indt_len <= parentIndent ) { RETURN_IMPLICIT(); } while ( YYTOKEN < YYCURSOR ) { int nl_len = 0; if ( nl_len = newline_len( YYTOKEN++ ) ) { nl_count++; YYTOKEN += nl_len - 1; } } if ( nl_count <= 1 ) { QUOTECAT(qstr, qcapa, qidx, ' '); } else { int i; for ( i = 0; i < nl_count - 1; i++ ) { QUOTECAT(qstr, qcapa, qidx, '\n'); } } goto Plain2; } ALLX { RETURN_IMPLICIT(); } ICOMMA { if ( plvl->status != syck_lvl_iseq && plvl->status != syck_lvl_imap ) { PLAIN_NOT_INL(); } else { PLAIN_IS_INL(); } RETURN_IMPLICIT(); } IMAPC { if ( plvl->status != syck_lvl_imap ) { PLAIN_NOT_INL(); } else { PLAIN_IS_INL(); } RETURN_IMPLICIT(); } ISEQC { if ( plvl->status != syck_lvl_iseq ) { PLAIN_NOT_INL(); } else { PLAIN_IS_INL(); } RETURN_IMPLICIT(); } " #" { eat_comments( parser ); RETURN_IMPLICIT(); } NULL { RETURN_IMPLICIT(); } SPC { goto Plain3; } ANY { QUOTECATS(qstr, qcapa, qidx, YYTOKEN, YYCURSOR - YYTOKEN); goto Plain2; } */ } SingleQuote: { int qidx = 0; int qcapa = 100; char *qstr = S_ALLOC_N( char, qcapa ); SingleQuote2: YYTOKEN = YYCURSOR; /*!re2c YINDENT { int indt_len; int nl_count = 0; SyckLevel *lvl; GOBBLE_UP_YAML_INDENT( indt_len, YYTOKEN ); lvl = CURRENT_LEVEL(); if ( lvl->status != syck_lvl_str ) { ADD_LEVEL( indt_len, syck_lvl_str ); } else if ( indt_len < lvl->spaces ) { /* Error! 
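   (a quoted scalar dedented below the indentation level that opened it)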
*/ } while ( YYTOKEN < YYCURSOR ) { int nl_len = 0; if ( nl_len = newline_len( YYTOKEN++ ) ) { nl_count++; YYTOKEN += nl_len - 1; } } if ( nl_count <= 1 ) { QUOTECAT(qstr, qcapa, qidx, ' '); } else { int i; for ( i = 0; i < nl_count - 1; i++ ) { QUOTECAT(qstr, qcapa, qidx, '\n'); } } goto SingleQuote2; } "''" { QUOTECAT(qstr, qcapa, qidx, '\''); goto SingleQuote2; } ( "'" | NULL ) { SyckLevel *lvl; SyckNode *n = syck_alloc_str(); lvl = CURRENT_LEVEL(); if ( lvl->status == syck_lvl_str ) { POP_LEVEL(); } if ( ((SyckParser *)parser)->taguri_expansion == 1 ) { n->type_id = syck_taguri( YAML_DOMAIN, "str", 3 ); } else { n->type_id = syck_strndup( "str", 3 ); } n->data.str->ptr = qstr; n->data.str->len = qidx; n->data.str->style = scalar_1quote; sycklval->nodeData = n; return YAML_PLAIN; } ANY { QUOTECAT(qstr, qcapa, qidx, *(YYCURSOR - 1)); goto SingleQuote2; } */ } DoubleQuote: { int keep_nl = 1; int qidx = 0; int qcapa = 100; char *qstr = S_ALLOC_N( char, qcapa ); DoubleQuote2: YYTOKEN = YYCURSOR; /*!re2c YINDENT { int indt_len; int nl_count = 0; SyckLevel *lvl; GOBBLE_UP_YAML_INDENT( indt_len, YYTOKEN ); lvl = CURRENT_LEVEL(); if ( lvl->status != syck_lvl_str ) { ADD_LEVEL( indt_len, syck_lvl_str ); } else if ( indt_len < lvl->spaces ) { /* FIXME */ } if ( keep_nl == 1 ) { while ( YYTOKEN < YYCURSOR ) { int nl_len = 0; if ( nl_len = newline_len( YYTOKEN++ ) ) { nl_count++; YYTOKEN += nl_len - 1; } } if ( nl_count <= 1 ) { QUOTECAT(qstr, qcapa, qidx, ' '); } else { int i; for ( i = 0; i < nl_count - 1; i++ ) { QUOTECAT(qstr, qcapa, qidx, '\n'); } } } keep_nl = 1; goto DoubleQuote2; } "\\" ESCSEQ { char ch = *( YYCURSOR - 1 ); QUOTECAT(qstr, qcapa, qidx, escape_seq( ch )); goto DoubleQuote2; } "\\x" HEX HEX { long ch; char *chr_text = syck_strndup( YYTOKEN, 4 ); chr_text[0] = '0'; ch = strtol( chr_text, NULL, 16 ); free( chr_text ); QUOTECAT(qstr, qcapa, qidx, ch); goto DoubleQuote2; } "\\" SPC* LF { keep_nl = 0; YYCURSOR--; goto DoubleQuote2; } ( "\"" | NULL ) { SyckLevel *lvl; SyckNode *n = syck_alloc_str(); lvl = CURRENT_LEVEL(); if ( lvl->status == syck_lvl_str ) { POP_LEVEL(); } if ( ((SyckParser *)parser)->taguri_expansion == 1 ) { n->type_id = syck_taguri( YAML_DOMAIN, "str", 3 ); } else { n->type_id = syck_strndup( "str", 3 ); } n->data.str->ptr = qstr; n->data.str->len = qidx; n->data.str->style = scalar_2quote; sycklval->nodeData = n; return YAML_PLAIN; } ANY { QUOTECAT(qstr, qcapa, qidx, *(YYCURSOR - 1)); goto DoubleQuote2; } */ } TransferMethod: { int qidx = 0; int qcapa = 100; char *qstr = S_ALLOC_N( char, qcapa ); TransferMethod2: YYTOKTMP = YYCURSOR; /*!re2c ( ENDSPC | NULL ) { SyckLevel *lvl; YYCURSOR = YYTOKTMP; if ( YYCURSOR == YYTOKEN + 1 ) { free( qstr ); return YAML_ITRANSFER; } lvl = CURRENT_LEVEL(); /* * URL Prefixing */ if ( *qstr == '^' ) { sycklval->name = S_ALLOC_N( char, qidx + strlen( lvl->domain ) ); sycklval->name[0] = '\0'; strcat( sycklval->name, lvl->domain ); strncat( sycklval->name, qstr + 1, qidx - 1 ); free( qstr ); } else { char *carat = qstr; char *qend = qstr + qidx; while ( (++carat) < qend ) { if ( *carat == '^' ) break; } if ( carat < qend ) { free( lvl->domain ); lvl->domain = syck_strndup( qstr, carat - qstr ); sycklval->name = S_ALLOC_N( char, ( qend - carat ) + strlen( lvl->domain ) ); sycklval->name[0] = '\0'; strcat( sycklval->name, lvl->domain ); strncat( sycklval->name, carat + 1, ( qend - carat ) - 1 ); free( qstr ); } else { sycklval->name = qstr; } } return YAML_TRANSFER; } /* * URL Escapes */ "\\" ESCSEQ { char ch = *( YYCURSOR - 1 ); 
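        /* translate the escape character (e.g. n, t, 0) to its byte value
           and append it to the tag string being collected */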
QUOTECAT(qstr, qcapa, qidx, escape_seq( ch )); goto TransferMethod2; } "\\x" HEX HEX { long ch; char *chr_text = syck_strndup( YYTOKTMP, 4 ); chr_text[0] = '0'; ch = strtol( chr_text, NULL, 16 ); free( chr_text ); QUOTECAT(qstr, qcapa, qidx, ch); goto TransferMethod2; } ANY { QUOTECAT(qstr, qcapa, qidx, *(YYCURSOR - 1)); goto TransferMethod2; } */ } ScalarBlock: { int qidx = 0; int qcapa = 100; char *qstr = S_ALLOC_N( char, qcapa ); int blockType = 0; int nlDoWhat = 0; int lastIndent = 0; int forceIndent = -1; char *yyt = YYTOKEN; SyckLevel *lvl = CURRENT_LEVEL(); int parentIndent; GET_TRUE_YAML_INDENT(parentIndent); switch ( *yyt ) { case '|': blockType = BLOCK_LIT; break; case '>': blockType = BLOCK_FOLD; break; } while ( ++yyt <= YYCURSOR ) { if ( *yyt == '-' ) { nlDoWhat = NL_CHOMP; } else if ( *yyt == '+' ) { nlDoWhat = NL_KEEP; } else if ( isdigit( *yyt ) ) { forceIndent = strtol( yyt, NULL, 10 ) + parentIndent; } } qstr[0] = '\0'; YYTOKEN = YYCURSOR; ScalarBlock2: YYTOKEN = YYCURSOR; /*!re2c YINDENT { char *pacer; char *tok = YYTOKEN; int indt_len = 0, nl_count = 0, fold_nl = 0, nl_begin = 0; GOBBLE_UP_YAML_INDENT( indt_len, tok ); lvl = CURRENT_LEVEL(); if ( indt_len > parentIndent && lvl->status != syck_lvl_block ) { int new_spaces = forceIndent > 0 ? forceIndent : indt_len; ADD_LEVEL( new_spaces, syck_lvl_block ); lastIndent = indt_len - new_spaces; nl_begin = 1; lvl = CURRENT_LEVEL(); } else if ( lvl->status != syck_lvl_block ) { YYCURSOR = YYTOKEN; RETURN_YAML_BLOCK(); } /* * Fold only in the event of two lines being on the leftmost * indentation. */ if ( blockType == BLOCK_FOLD && lastIndent == 0 && ( indt_len - lvl->spaces ) == 0 ) { fold_nl = 1; } pacer = YYTOKEN; while ( pacer < YYCURSOR ) { int nl_len = 0; if ( nl_len = newline_len( pacer++ ) ) { nl_count++; pacer += nl_len - 1; } } if ( fold_nl == 1 || nl_begin == 1 ) { nl_count--; } if ( nl_count < 1 && nl_begin == 0 ) { QUOTECAT(qstr, qcapa, qidx, ' '); } else { int i; for ( i = 0; i < nl_count; i++ ) { QUOTECAT(qstr, qcapa, qidx, '\n'); } } lastIndent = indt_len - lvl->spaces; YYCURSOR -= lastIndent; if ( indt_len < lvl->spaces ) { POP_LEVEL(); YYCURSOR = YYTOKEN; RETURN_YAML_BLOCK(); } goto ScalarBlock2; } "#" { lvl = CURRENT_LEVEL(); if ( lvl->status != syck_lvl_block ) { eat_comments( parser ); YYTOKEN = YYCURSOR; } else { QUOTECAT(qstr, qcapa, qidx, *YYTOKEN); } goto ScalarBlock2; } NULL { YYCURSOR--; POP_LEVEL(); RETURN_YAML_BLOCK(); } "---" ENDSPC { if ( YYTOKEN == YYLINEPTR ) { if ( blockType == BLOCK_FOLD && qidx > 0 ) { qidx -= 1; } QUOTECAT(qstr, qcapa, qidx, '\n'); POP_LEVEL(); YYCURSOR = YYTOKEN; RETURN_YAML_BLOCK(); } else { QUOTECAT(qstr, qcapa, qidx, *YYTOKEN); YYCURSOR = YYTOKEN + 1; goto ScalarBlock2; } } ANY { QUOTECAT(qstr, qcapa, qidx, *YYTOKEN); goto ScalarBlock2; } */ } return 0; } void eat_comments( SyckParser *parser ) { Comment: { YYTOKEN = YYCURSOR; /*!re2c ( LF+ | NULL ) { YYCURSOR = YYTOKEN; return; } ANY { goto Comment; } */ } } char escape_seq( char ch ) { switch ( ch ) { case '0': return '\0'; case 'a': return 7; case 'b': return '\010'; case 'e': return '\033'; case 'f': return '\014'; case 'n': return '\n'; case 'r': return '\015'; case 't': return '\t'; case 'v': return '\013'; default: return ch; } } int is_newline( char *ptr ) { return newline_len( ptr ); } int newline_len( char *ptr ) { if ( *ptr == '\n' ) return 1; if ( *ptr == '\r' && *( ptr + 1 ) == '\n' ) return 2; return 0; } int syckwrap() { return 1; } void syckerror( char *msg ) { if ( syck_parser_ptr->error_handler == NULL 
) syck_parser_ptr->error_handler = syck_default_error_handler; syck_parser_ptr->root = syck_parser_ptr->root_on_error; (syck_parser_ptr->error_handler)(syck_parser_ptr, msg); } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/lib/implicit.c0000644000000000000000000013171511672453175022616 0ustar rootroot/* Generated by re2c 0.9.3 on Wed Mar 30 08:27:25 2005 */ #line 1 "implicit.re" /* * implicit.re * * $Author: why $ * $Date: 2005/04/06 17:18:59 $ * * Copyright (C) 2003 why the lucky stiff */ #include "syck.h" #define YYCTYPE char #define YYCURSOR cursor #define YYMARKER marker #define YYLIMIT limit #define YYFILL(n) void try_tag_implicit( SyckNode *n, int taguri ) { char *tid = ""; switch ( n->kind ) { case syck_str_kind: tid = syck_match_implicit( n->data.str->ptr, n->data.str->len ); break; case syck_seq_kind: tid = "seq"; break; case syck_map_kind: tid = "map"; break; } if ( n->type_id != NULL ) S_FREE( n->type_id ); if ( taguri == 1 ) { n->type_id = syck_taguri( YAML_DOMAIN, tid, strlen( tid ) ); } else { n->type_id = syck_strndup( tid, strlen( tid ) ); } } char *syck_match_implicit( char *str, size_t len ) { char *cursor, *limit, *marker; cursor = str; limit = str + len; #line 6 "" { YYCTYPE yych; unsigned int yyaccept; goto yy0; yy1: ++YYCURSOR; yy0: if((YYLIMIT - YYCURSOR) < 26) YYFILL(26); yych = *YYCURSOR; switch(yych){ case '\000': goto yy6; case '+': goto yy16; case '-': goto yy17; case '.': goto yy20; case '0': goto yy18; case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy19; case '<': goto yy22; case '=': goto yy21; case 'F': goto yy15; case 'N': goto yy5; case 'O': goto yy13; case 'T': goto yy11; case 'Y': goto yy9; case 'f': goto yy14; case 'n': goto yy4; case 'o': goto yy12; case 't': goto yy10; case 'y': goto yy8; case '~': goto yy2; default: goto yy23; } yy2: ++YYCURSOR; if((yych = *YYCURSOR) <= '\000') goto yy6; goto yy3; yy3: #line 123 "implicit.re" { return "str"; } #line 51 "" yy4: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case 'o': goto yy172; case 'u': goto yy200; default: goto yy3; } yy5: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case 'O': case 'o': goto yy172; case 'U': goto yy195; case 'u': goto yy196; default: goto yy3; } yy6: ++YYCURSOR; goto yy7; yy7: #line 85 "implicit.re" { return "null"; } #line 72 "" yy8: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case 'e': goto yy194; default: goto yy3; } yy9: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case 'E': goto yy192; case 'e': goto yy193; default: goto yy3; } yy10: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case 'r': goto yy190; default: goto yy3; } yy11: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case 'R': goto yy186; case 'r': goto yy187; default: goto yy3; } yy12: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case 'f': goto yy185; case 'n': goto yy182; default: goto yy3; } yy13: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case 'F': goto yy180; case 'N': case 'n': goto yy182; case 'f': goto yy181; default: goto yy3; } yy14: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case 'a': goto yy177; default: goto yy3; } yy15: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case 'A': goto yy168; case 'a': goto yy169; default: goto yy3; } yy16: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case '.': goto yy167; case '0': goto yy158; case '1': case '2': case '3': case '4': case '5': case '6': case 
'7': case '8': case '9': goto yy47; default: goto yy3; } yy17: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case '.': goto yy157; case '0': goto yy158; case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy47; default: goto yy3; } yy18: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case '\000': goto yy52; case ',': goto yy142; case '.': goto yy50; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': goto yy140; case '8': case '9': goto yy141; case ':': goto yy49; case 'x': goto yy144; default: goto yy3; } yy19: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case '\000': goto yy52; case ',': goto yy47; case '.': goto yy50; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy46; case ':': goto yy49; default: goto yy3; } yy20: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case 'I': goto yy33; case 'N': goto yy31; case 'i': goto yy32; case 'n': goto yy30; default: goto yy3; } yy21: yych = *++YYCURSOR; if(yych <= '\000') goto yy28; goto yy3; yy22: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case '<': goto yy24; default: goto yy3; } yy23: yych = *++YYCURSOR; goto yy3; yy24: yych = *++YYCURSOR; if(yych <= '\000') goto yy26; goto yy25; yy25: YYCURSOR = YYMARKER; switch(yyaccept){ case 0: goto yy3; } yy26: ++YYCURSOR; goto yy27; yy27: #line 121 "implicit.re" { return "merge"; } #line 230 "" yy28: ++YYCURSOR; goto yy29; yy29: #line 119 "implicit.re" { return "default"; } #line 236 "" yy30: yych = *++YYCURSOR; switch(yych){ case 'a': goto yy45; default: goto yy25; } yy31: yych = *++YYCURSOR; switch(yych){ case 'A': goto yy40; case 'a': goto yy41; default: goto yy25; } yy32: yych = *++YYCURSOR; switch(yych){ case 'n': goto yy39; default: goto yy25; } yy33: yych = *++YYCURSOR; switch(yych){ case 'N': goto yy34; case 'n': goto yy35; default: goto yy25; } yy34: yych = *++YYCURSOR; switch(yych){ case 'F': goto yy36; default: goto yy25; } yy35: yych = *++YYCURSOR; switch(yych){ case 'f': goto yy36; default: goto yy25; } yy36: yych = *++YYCURSOR; if(yych >= '\001') goto yy25; goto yy37; yy37: ++YYCURSOR; goto yy38; yy38: #line 105 "implicit.re" { return "float#inf"; } #line 277 "" yy39: yych = *++YYCURSOR; switch(yych){ case 'f': goto yy36; default: goto yy25; } yy40: yych = *++YYCURSOR; switch(yych){ case 'N': goto yy42; default: goto yy25; } yy41: yych = *++YYCURSOR; switch(yych){ case 'N': goto yy42; default: goto yy25; } yy42: yych = *++YYCURSOR; if(yych >= '\001') goto yy25; goto yy43; yy43: ++YYCURSOR; goto yy44; yy44: #line 109 "implicit.re" { return "float#nan"; } #line 301 "" yy45: yych = *++YYCURSOR; switch(yych){ case 'n': goto yy42; default: goto yy25; } yy46: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy74; default: goto yy48; } yy47: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy48; yy48: switch(yych){ case '\000': goto yy52; case ',': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy47; case '.': goto yy50; case ':': goto yy49; default: goto yy25; } yy49: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': goto yy66; case '6': case '7': case '8': case '9': goto yy67; default: goto yy25; } yy50: ++YYCURSOR; if(YYLIMIT == YYCURSOR) 
YYFILL(1); yych = *YYCURSOR; goto yy51; yy51: switch(yych){ case '\000': goto yy56; case ',': goto yy54; case '.': goto yy58; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy50; case 'E': case 'e': goto yy60; default: goto yy25; } yy52: ++YYCURSOR; goto yy53; yy53: #line 97 "implicit.re" { return "int"; } #line 386 "" yy54: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy55; yy55: switch(yych){ case '\000': goto yy56; case ',': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy54; default: goto yy25; } yy56: ++YYCURSOR; goto yy57; yy57: #line 99 "implicit.re" { return "float#fix"; } #line 411 "" yy58: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy59; yy59: switch(yych){ case '.': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy58; case 'E': case 'e': goto yy60; default: goto yy25; } yy60: yych = *++YYCURSOR; switch(yych){ case '+': case '-': goto yy61; default: goto yy25; } yy61: yych = *++YYCURSOR; if(yych <= '\000') goto yy25; goto yy63; yy62: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy63; yy63: switch(yych){ case '\000': goto yy64; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy62; default: goto yy25; } yy64: ++YYCURSOR; goto yy65; yy65: #line 101 "implicit.re" { return "float#exp"; } #line 463 "" yy66: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; switch(yych){ case '\000': goto yy70; case '.': goto yy68; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy67; case ':': goto yy49; default: goto yy25; } yy67: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; switch(yych){ case '\000': goto yy70; case '.': goto yy68; case ':': goto yy49; default: goto yy25; } yy68: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy69; yy69: switch(yych){ case '\000': goto yy72; case ',': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy68; default: goto yy25; } yy70: ++YYCURSOR; goto yy71; yy71: #line 95 "implicit.re" { return "int#base60"; } #line 518 "" yy72: ++YYCURSOR; goto yy73; yy73: #line 103 "implicit.re" { return "float#base60"; } #line 524 "" yy74: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy75; default: goto yy48; } yy75: yych = *++YYCURSOR; switch(yych){ case '-': goto yy76; default: goto yy48; } yy76: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy77; default: goto yy25; } yy77: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy78; default: goto yy25; } yy78: yych = *++YYCURSOR; switch(yych){ case '-': goto yy79; default: goto yy25; } yy79: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy80; default: goto yy25; } yy80: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy81; default: goto yy25; } yy81: yych = *++YYCURSOR; switch(yych){ case '\000': goto yy82; case 
'0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy25; case 'T': goto yy84; case 't': goto yy85; default: goto yy87; } yy82: ++YYCURSOR; goto yy83; yy83: #line 111 "implicit.re" { return "timestamp#ymd"; } #line 627 "" yy84: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy126; default: goto yy25; } yy85: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy108; default: goto yy25; } yy86: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy87; yy87: switch(yych){ case '\t': case ' ': goto yy86; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy88; default: goto yy25; } yy88: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy89; default: goto yy25; } yy89: yych = *++YYCURSOR; switch(yych){ case ':': goto yy90; default: goto yy25; } yy90: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy91; default: goto yy25; } yy91: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy92; default: goto yy25; } yy92: yych = *++YYCURSOR; switch(yych){ case ':': goto yy93; default: goto yy25; } yy93: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy94; default: goto yy25; } yy94: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy95; default: goto yy25; } yy95: yych = *++YYCURSOR; switch(yych){ case '\t': case ' ': goto yy98; case '.': goto yy96; default: goto yy25; } yy96: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy97; yy97: switch(yych){ case '\t': case ' ': goto yy98; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy96; default: goto yy25; } yy98: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy99; yy99: switch(yych){ case '\t': case ' ': goto yy98; case '+': case '-': goto yy101; case 'Z': goto yy100; default: goto yy25; } yy100: yych = *++YYCURSOR; if(yych <= '\000') goto yy105; goto yy25; yy101: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy102; default: goto yy25; } yy102: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy103; default: goto yy25; } yy103: yych = *++YYCURSOR; switch(yych){ case '\000': goto yy105; case ':': goto yy104; default: goto yy25; } yy104: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy107; default: goto yy25; } yy105: ++YYCURSOR; goto yy106; yy106: #line 115 "implicit.re" { return "timestamp#spaced"; } #line 847 "" yy107: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy100; default: goto yy25; } yy108: yych = *++YYCURSOR; switch(yych){ case '0': case '1': 
case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy109; default: goto yy25; } yy109: yych = *++YYCURSOR; switch(yych){ case ':': goto yy110; default: goto yy25; } yy110: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy111; default: goto yy25; } yy111: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy112; default: goto yy25; } yy112: yych = *++YYCURSOR; switch(yych){ case ':': goto yy113; default: goto yy25; } yy113: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy114; default: goto yy25; } yy114: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy115; default: goto yy25; } yy115: yych = *++YYCURSOR; switch(yych){ case '.': goto yy116; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy25; default: goto yy117; } yy116: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy117; yy117: switch(yych){ case '+': case '-': goto yy119; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy116; case 'Z': goto yy118; default: goto yy25; } yy118: yych = *++YYCURSOR; if(yych <= '\000') goto yy123; goto yy25; yy119: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy120; default: goto yy25; } yy120: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy121; default: goto yy25; } yy121: yych = *++YYCURSOR; switch(yych){ case '\000': goto yy123; case ':': goto yy122; default: goto yy25; } yy122: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy125; default: goto yy25; } yy123: ++YYCURSOR; goto yy124; yy124: #line 113 "implicit.re" { return "timestamp#iso8601"; } #line 1033 "" yy125: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy118; default: goto yy25; } yy126: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy127; default: goto yy25; } yy127: yych = *++YYCURSOR; switch(yych){ case ':': goto yy128; default: goto yy25; } yy128: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy129; default: goto yy25; } yy129: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy130; default: goto yy25; } yy130: yych = *++YYCURSOR; switch(yych){ case ':': goto yy131; default: goto yy25; } yy131: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy132; default: goto yy25; } yy132: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy133; default: goto yy25; } yy133: yych = *++YYCURSOR; switch(yych){ case 
'.': goto yy134; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy25; case 'Z': goto yy136; default: goto yy135; } yy134: ++YYCURSOR; if((YYLIMIT - YYCURSOR) < 7) YYFILL(7); yych = *YYCURSOR; goto yy135; yy135: switch(yych){ case '+': case '-': goto yy119; case '0': goto yy134; case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy138; case 'Z': goto yy118; default: goto yy25; } yy136: yych = *++YYCURSOR; if(yych >= '\001') goto yy25; goto yy137; yy137: yych = *++YYCURSOR; goto yy124; yy138: ++YYCURSOR; if((YYLIMIT - YYCURSOR) < 7) YYFILL(7); yych = *YYCURSOR; goto yy139; yy139: switch(yych){ case '+': case '-': goto yy119; case '0': goto yy134; case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy138; case 'Z': goto yy136; default: goto yy25; } yy140: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': goto yy155; case '8': case '9': goto yy153; default: goto yy143; } yy141: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy153; default: goto yy152; } yy142: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy143; yy143: switch(yych){ case '\000': goto yy149; case ',': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': goto yy142; case '.': goto yy50; case '8': case '9': goto yy151; case ':': goto yy49; default: goto yy25; } yy144: yych = *++YYCURSOR; if(yych <= '\000') goto yy25; goto yy146; yy145: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy146; yy146: switch(yych){ case '\000': goto yy147; case ',': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': goto yy145; default: goto yy25; } yy147: ++YYCURSOR; goto yy148; yy148: #line 91 "implicit.re" { return "int#hex"; } #line 1275 "" yy149: ++YYCURSOR; goto yy150; yy150: #line 93 "implicit.re" { return "int#oct"; } #line 1281 "" yy151: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy152; yy152: switch(yych){ case ',': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy151; case '.': goto yy50; case ':': goto yy49; default: goto yy25; } yy153: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy154; default: goto yy152; } yy154: yych = *++YYCURSOR; switch(yych){ case '-': goto yy76; default: goto yy152; } yy155: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': goto yy156; case '8': case '9': goto yy154; default: goto yy143; } yy156: yych = *++YYCURSOR; switch(yych){ case '-': goto yy76; default: goto yy143; } yy157: yych = *++YYCURSOR; switch(yych){ case 'I': goto yy160; case 'i': goto yy159; default: goto yy25; } yy158: yych = *++YYCURSOR; switch(yych){ case '\000': goto yy52; case 'x': goto yy144; default: goto yy143; } yy159: yych = *++YYCURSOR; switch(yych){ case 'n': goto yy166; default: goto yy25; } yy160: yych = *++YYCURSOR; switch(yych){ case 'N': goto yy161; case 'n': goto yy162; default: goto yy25; } yy161: yych = *++YYCURSOR; switch(yych){ case 'F': goto yy163; 
default: goto yy25; } yy162: yych = *++YYCURSOR; switch(yych){ case 'f': goto yy163; default: goto yy25; } yy163: yych = *++YYCURSOR; if(yych >= '\001') goto yy25; goto yy164; yy164: ++YYCURSOR; goto yy165; yy165: #line 107 "implicit.re" { return "float#neginf"; } #line 1381 "" yy166: yych = *++YYCURSOR; switch(yych){ case 'f': goto yy163; default: goto yy25; } yy167: yych = *++YYCURSOR; switch(yych){ case 'I': goto yy33; case 'i': goto yy32; default: goto yy25; } yy168: yych = *++YYCURSOR; switch(yych){ case 'L': goto yy175; default: goto yy25; } yy169: yych = *++YYCURSOR; switch(yych){ case 'l': goto yy170; default: goto yy25; } yy170: yych = *++YYCURSOR; switch(yych){ case 's': goto yy171; default: goto yy25; } yy171: yych = *++YYCURSOR; switch(yych){ case 'e': goto yy172; default: goto yy25; } yy172: yych = *++YYCURSOR; if(yych >= '\001') goto yy25; goto yy173; yy173: ++YYCURSOR; goto yy174; yy174: #line 89 "implicit.re" { return "bool#no"; } #line 1421 "" yy175: yych = *++YYCURSOR; switch(yych){ case 'S': goto yy176; default: goto yy25; } yy176: yych = *++YYCURSOR; switch(yych){ case 'E': goto yy172; default: goto yy25; } yy177: yych = *++YYCURSOR; switch(yych){ case 'l': goto yy178; default: goto yy25; } yy178: yych = *++YYCURSOR; switch(yych){ case 's': goto yy179; default: goto yy25; } yy179: yych = *++YYCURSOR; switch(yych){ case 'e': goto yy172; default: goto yy25; } yy180: yych = *++YYCURSOR; switch(yych){ case 'F': goto yy172; default: goto yy25; } yy181: yych = *++YYCURSOR; switch(yych){ case 'f': goto yy172; default: goto yy25; } yy182: yych = *++YYCURSOR; if(yych >= '\001') goto yy25; goto yy183; yy183: ++YYCURSOR; goto yy184; yy184: #line 87 "implicit.re" { return "bool#yes"; } #line 1465 "" yy185: yych = *++YYCURSOR; switch(yych){ case 'f': goto yy172; default: goto yy25; } yy186: yych = *++YYCURSOR; switch(yych){ case 'U': goto yy189; default: goto yy25; } yy187: yych = *++YYCURSOR; switch(yych){ case 'u': goto yy188; default: goto yy25; } yy188: yych = *++YYCURSOR; switch(yych){ case 'e': goto yy182; default: goto yy25; } yy189: yych = *++YYCURSOR; switch(yych){ case 'E': goto yy182; default: goto yy25; } yy190: yych = *++YYCURSOR; switch(yych){ case 'u': goto yy191; default: goto yy25; } yy191: yych = *++YYCURSOR; switch(yych){ case 'e': goto yy182; default: goto yy25; } yy192: yych = *++YYCURSOR; switch(yych){ case 'S': goto yy182; default: goto yy25; } yy193: yych = *++YYCURSOR; switch(yych){ case 's': goto yy182; default: goto yy25; } yy194: yych = *++YYCURSOR; switch(yych){ case 's': goto yy182; default: goto yy25; } yy195: yych = *++YYCURSOR; switch(yych){ case 'L': goto yy199; default: goto yy25; } yy196: yych = *++YYCURSOR; switch(yych){ case 'l': goto yy197; default: goto yy25; } yy197: yych = *++YYCURSOR; switch(yych){ case 'l': goto yy198; default: goto yy25; } yy198: yych = *++YYCURSOR; if(yych <= '\000') goto yy6; goto yy25; yy199: yych = *++YYCURSOR; switch(yych){ case 'L': goto yy198; default: goto yy25; } yy200: yych = *++YYCURSOR; switch(yych){ case 'l': goto yy201; default: goto yy25; } yy201: ++YYCURSOR; switch((yych = *YYCURSOR)) { case 'l': goto yy198; default: goto yy25; } } #line 125 "implicit.re" } /* Remove ending fragment and compare types */ int syck_tagcmp( char *tag1, char *tag2 ) { if ( tag1 == tag2 ) return 1; if ( tag1 == NULL || tag2 == NULL ) return 0; else { int i; char *othorpe; char *tmp1 = syck_strndup( tag1, strlen( tag1 ) ); char *tmp2 = syck_strndup( tag2, strlen( tag2 ) ); othorpe = strstr( tmp1, "#" ); if ( othorpe != NULL ) { 
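/* truncate at the '#' so only the base tags (fragment removed) are compared */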
othorpe[0] = '\0'; } othorpe = strstr( tmp2, "#" ); if ( othorpe != NULL ) { othorpe[0] = '\0'; } i = strcmp( tmp1, tmp2 ); S_FREE( tmp1 ); S_FREE( tmp2 ); return i; } } char * syck_type_id_to_uri( char *type_id ) { char *cursor, *limit, *marker; cursor = type_id; limit = type_id + strlen( type_id ); #line 1552 "" { YYCTYPE yych; unsigned int yyaccept; goto yy202; yy203: ++YYCURSOR; yy202: if((YYLIMIT - YYCURSOR) < 21) YYFILL(21); yych = *YYCURSOR; switch(yych){ case '\000': goto yy204; case '!': goto yy208; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 'u': case 'v': case 'w': case 'y': case 'z': goto yy210; case 't': goto yy205; case 'x': goto yy207; default: goto yy211; } yy204: YYCURSOR = YYMARKER; switch(yyaccept){ case 0: goto yy206; } yy205: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case ',': goto yy216; case '-': goto yy212; case '.': goto yy217; case '/': goto yy218; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '_': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy214; case 'a': goto yy246; default: goto yy206; } yy206: #line 202 "implicit.re" { return syck_taguri( YAML_DOMAIN, type_id, strlen( type_id ) ); } #line 1700 "" yy207: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case ',': case '.': case '/': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy215; case '-': goto yy236; default: goto yy206; } yy208: ++YYCURSOR; goto yy209; yy209: #line 176 "implicit.re" { return syck_xprivate( type_id + 1, strlen( type_id ) - 1 ); } #line 1774 "" yy210: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case ',': goto yy216; case '-': goto yy212; case '.': goto yy217; case '/': goto yy218; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 
'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy214; default: goto yy206; } yy211: yych = *++YYCURSOR; goto yy206; yy212: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy213; yy213: switch(yych){ case '-': goto yy212; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy214; default: goto yy204; } yy214: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy215; yy215: switch(yych){ case ',': goto yy216; case '-': goto yy212; case '.': goto yy217; case '/': goto yy218; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy214; default: goto yy204; } yy216: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy224; default: goto yy204; } yy217: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy220; default: goto yy204; } yy218: ++YYCURSOR; goto yy219; yy219: #line 178 "implicit.re" { char *domain = S_ALLOC_N( char, ( YYCURSOR - type_id ) + 15 ); char *uri; domain[0] = '\0'; strncat( domain, type_id, ( YYCURSOR - type_id ) - 1 ); strcat( domain, "." 
); strcat( domain, YAML_DOMAIN ); uri = syck_taguri( domain, YYCURSOR, YYLIMIT - YYCURSOR ); S_FREE( domain ); return uri; } #line 2084 "" yy220: ++YYCURSOR; if((YYLIMIT - YYCURSOR) < 2) YYFILL(2); yych = *YYCURSOR; goto yy221; yy221: switch(yych){ case ',': goto yy216; case '-': goto yy222; case '.': goto yy217; case '/': goto yy218; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy220; default: goto yy204; } yy222: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy223; yy223: switch(yych){ case '-': goto yy222; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy220; default: goto yy204; } yy224: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy225; default: goto yy204; } yy225: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy226; default: goto yy204; } yy226: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy227; default: goto yy204; } yy227: yych = *++YYCURSOR; switch(yych){ case '-': goto yy228; case '/': goto yy229; default: goto yy204; } yy228: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy231; default: goto yy204; } yy229: ++YYCURSOR; goto yy230; yy230: #line 191 "implicit.re" { char *domain = S_ALLOC_N( char, YYCURSOR - type_id ); char *uri; domain[0] = '\0'; strncat( domain, type_id, ( YYCURSOR - type_id ) - 1 ); uri = syck_taguri( domain, YYCURSOR, YYLIMIT - YYCURSOR ); S_FREE( domain ); return uri; } #line 2302 "" yy231: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy232; default: goto yy204; } yy232: yych = *++YYCURSOR; switch(yych){ case '-': goto yy233; case '/': goto yy229; default: goto yy204; } yy233: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy234; default: goto yy204; } yy234: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy235; default: goto yy204; } 
yy235: yych = *++YYCURSOR; switch(yych){ case '/': goto yy229; default: goto yy204; } yy236: yych = *++YYCURSOR; switch(yych){ case 'p': goto yy237; default: goto yy213; } yy237: yych = *++YYCURSOR; switch(yych){ case ',': goto yy216; case '.': goto yy217; case '/': goto yy218; case 'r': goto yy238; default: goto yy213; } yy238: yych = *++YYCURSOR; switch(yych){ case ',': goto yy216; case '.': goto yy217; case '/': goto yy218; case 'i': goto yy239; default: goto yy213; } yy239: yych = *++YYCURSOR; switch(yych){ case ',': goto yy216; case '.': goto yy217; case '/': goto yy218; case 'v': goto yy240; default: goto yy213; } yy240: yych = *++YYCURSOR; switch(yych){ case ',': goto yy216; case '.': goto yy217; case '/': goto yy218; case 'a': goto yy241; default: goto yy213; } yy241: yych = *++YYCURSOR; switch(yych){ case ',': goto yy216; case '.': goto yy217; case '/': goto yy218; case 't': goto yy242; default: goto yy213; } yy242: yych = *++YYCURSOR; switch(yych){ case ',': goto yy216; case '.': goto yy217; case '/': goto yy218; case 'e': goto yy243; default: goto yy213; } yy243: yych = *++YYCURSOR; switch(yych){ case ',': goto yy216; case '.': goto yy217; case '/': goto yy218; case ':': goto yy244; default: goto yy213; } yy244: ++YYCURSOR; goto yy245; yy245: #line 174 "implicit.re" { return type_id; } #line 2422 "" yy246: yych = *++YYCURSOR; switch(yych){ case ',': goto yy216; case '.': goto yy217; case '/': goto yy218; case 'g': goto yy247; default: goto yy213; } yy247: yych = *++YYCURSOR; switch(yych){ case ',': goto yy216; case '.': goto yy217; case '/': goto yy218; case ':': goto yy248; default: goto yy213; } yy248: yych = *++YYCURSOR; switch(yych){ case ',': case '-': case '.': goto yy204; default: goto yy250; } yy249: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy250; yy250: switch(yych){ case ',': goto yy253; case '-': goto yy251; case '.': goto yy254; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy249; default: goto yy204; } yy251: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy252; yy252: switch(yych){ case '-': goto yy251; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy249; default: goto yy204; } yy253: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy259; default: goto yy204; } yy254: 
++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy255; default: goto yy204; } yy255: ++YYCURSOR; if((YYLIMIT - YYCURSOR) < 2) YYFILL(2); yych = *YYCURSOR; goto yy256; yy256: switch(yych){ case ',': goto yy253; case '-': goto yy257; case '.': goto yy254; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy255; default: goto yy204; } yy257: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy258; yy258: switch(yych){ case '-': goto yy257; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy255; default: goto yy204; } yy259: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy260; default: goto yy204; } yy260: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy261; default: goto yy204; } yy261: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy262; default: goto yy204; } yy262: yych = *++YYCURSOR; switch(yych){ case '-': goto yy263; case ':': goto yy264; default: goto yy204; } yy263: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy266; default: goto yy204; } yy264: ++YYCURSOR; goto yy265; yy265: #line 172 "implicit.re" { return type_id; } #line 2874 "" yy266: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy267; default: goto yy204; } yy267: yych = *++YYCURSOR; switch(yych){ case '-': goto yy268; case ':': goto yy264; default: 
goto yy204; } yy268: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy269; default: goto yy204; } yy269: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy270; default: goto yy204; } yy270: ++YYCURSOR; switch((yych = *YYCURSOR)) { case ':': goto yy264; default: goto yy204; } } #line 204 "implicit.re" } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/lib/token.c0000644000000000000000000016166211672453175022130 0ustar rootroot/* Generated by re2c 0.9.3 on Tue Apr 12 21:11:14 2005 */ #line 1 "token.re" /* * token.re * * $Author: why $ * $Date: 2005/04/13 06:27:54 $ * * Copyright (C) 2003 why the lucky stiff */ #include "syck.h" #include "gram.h" /* * Allocate quoted strings in chunks */ #define QUOTELEN 1024 /* * They do my bidding... */ #define YYCTYPE char #define YYCURSOR parser->cursor #define YYMARKER parser->marker #define YYLIMIT parser->limit #define YYTOKEN parser->token #define YYTOKTMP parser->toktmp #define YYLINEPTR parser->lineptr #define YYLINECTPTR parser->linectptr #define YYLINE parser->linect #define YYFILL(n) syck_parser_read(parser) /* * Repositions the cursor at `n' offset from the token start. * Only works in `Header' and `Document' sections. */ #define YYPOS(n) YYCURSOR = YYTOKEN + n /* * Track line numbers */ #define NEWLINE(ptr) YYLINEPTR = ptr + newline_len(ptr); if ( YYLINEPTR > YYLINECTPTR ) { YYLINE++; YYLINECTPTR = YYLINEPTR; } /* * I like seeing the level operations as macros... */ #define ADD_LEVEL(len, status) syck_parser_add_level( parser, len, status ) #define POP_LEVEL() syck_parser_pop_level( parser ) #define CURRENT_LEVEL() syck_parser_current_level( parser ) /* * Force a token next time around sycklex() */ #define FORCE_NEXT_TOKEN(tok) parser->force_token = tok; /* * Nice little macro to ensure we're YAML_IOPENed to the current level. * * Only use this macro in the "Document" section * */ #define ENSURE_YAML_IOPEN(last_lvl, to_len, reset) \ if ( last_lvl->spaces < to_len ) \ { \ if ( last_lvl->status == syck_lvl_iseq || last_lvl->status == syck_lvl_imap ) \ { \ goto Document; \ } \ else \ { \ ADD_LEVEL( to_len, syck_lvl_doc ); \ if ( reset == 1 ) YYPOS(0); \ return YAML_IOPEN; \ } \ } /* * Nice little macro to ensure closure of levels. 
* * Only use this macro in the "Document" section * */ #define ENSURE_YAML_IEND(last_lvl, to_len) \ if ( last_lvl->spaces > to_len ) \ { \ syck_parser_pop_level( parser ); \ YYPOS(0); \ return YAML_IEND; \ } /* * Concatenates quoted string items and manages allocation * to the quoted string */ #define QUOTECAT(s, c, i, l) \ { \ if ( i + 1 >= c ) \ { \ c += QUOTELEN; \ S_REALLOC_N( s, char, c ); \ } \ s[i++] = l; \ s[i] = '\0'; \ } #define QUOTECATS(s, c, i, cs, cl) \ { \ while ( i + cl >= c ) \ { \ c += QUOTELEN; \ S_REALLOC_N( s, char, c ); \ } \ S_MEMCPY( s + i, cs, char, cl ); \ i += cl; \ s[i] = '\0'; \ } /* * Tags a plain scalar with a transfer method * * Use only in "Plain" section * */ #define RETURN_IMPLICIT() \ { \ SyckNode *n = syck_alloc_str(); \ YYCURSOR = YYTOKEN; \ n->data.str->ptr = qstr; \ n->data.str->len = qidx; \ n->data.str->style = scalar_plain; \ sycklval->nodeData = n; \ if ( parser->implicit_typing == 1 ) \ { \ try_tag_implicit( sycklval->nodeData, parser->taguri_expansion ); \ } \ return YAML_PLAIN; \ } /* concat the inline characters to the plain scalar */ #define PLAIN_NOT_INL() \ if ( *(YYCURSOR - 1) == ' ' || is_newline( YYCURSOR - 1 ) ) \ { \ YYCURSOR--; \ } \ QUOTECATS(qstr, qcapa, qidx, YYTOKEN, YYCURSOR - YYTOKEN); \ goto Plain2; /* trim spaces off the end in case of indent */ #define PLAIN_IS_INL() \ char *walker = qstr + qidx - 1; \ while ( walker > qstr && ( *walker == '\n' || *walker == ' ' ) ) \ { \ qidx--; \ walker[0] = '\0'; \ walker--; \ } /* * Keep or chomp block? * * Use only in "ScalarBlock" section * */ #define RETURN_YAML_BLOCK() \ { \ SyckNode *n = syck_alloc_str(); \ if ( ((SyckParser *)parser)->taguri_expansion == 1 ) \ { \ n->type_id = syck_taguri( YAML_DOMAIN, "str", 3 ); \ } \ else \ { \ n->type_id = syck_strndup( "str", 3 ); \ } \ n->data.str->ptr = qstr; \ n->data.str->len = qidx; \ if ( blockType == BLOCK_LIT ) { \ n->data.str->style = scalar_literal; \ } else { \ n->data.str->style = scalar_fold; \ } \ if ( qidx > 0 ) \ { \ if ( nlDoWhat != NL_KEEP ) \ { \ char *fc = n->data.str->ptr + n->data.str->len - 1; \ while ( is_newline( fc ) ) fc--; \ if ( nlDoWhat != NL_CHOMP && fc < n->data.str->ptr + n->data.str->len - 1 ) \ fc += 1; \ n->data.str->len = fc - n->data.str->ptr + 1; \ } \ } \ sycklval->nodeData = n; \ return YAML_BLOCK; \ } /* * Handles newlines, calculates indent */ #define GOBBLE_UP_YAML_INDENT( ict, start ) \ char *indent = start; \ NEWLINE(indent); \ while ( indent < YYCURSOR ) \ { \ if ( is_newline( ++indent ) ) \ { \ NEWLINE(indent); \ } \ } \ ict = 0; \ if ( *YYCURSOR == '\0' ) \ { \ ict = -1; \ start = YYCURSOR - 1; \ } \ else if ( *YYLINEPTR == ' ' ) \ { \ ict = YYCURSOR - YYLINEPTR; \ } /* * If an indent exists at the current level, back up. */ #define GET_TRUE_YAML_INDENT(indt_len) \ { \ SyckLevel *lvl_deep = CURRENT_LEVEL(); \ indt_len = lvl_deep->spaces; \ if ( indt_len == YYTOKEN - YYLINEPTR ) \ { \ SyckLevel *lvl_over; \ parser->lvl_idx--; \ lvl_over = CURRENT_LEVEL(); \ indt_len = lvl_over->spaces; \ parser->lvl_idx++; \ } \ } /* * Argjh! I hate globals! Here for syckerror() only! */ SyckParser *syck_parser_ptr = NULL; /* * Accessory funcs later in this file. */ void eat_comments( SyckParser * ); char escape_seq( char ); int is_newline( char *ptr ); int newline_len( char *ptr ); int sycklex_yaml_utf8( YYSTYPE *, SyckParser * ); int sycklex_bytecode_utf8( YYSTYPE *, SyckParser * ); int syckwrap(); /* * My own re-entrant sycklex() using re2c. * You really get used to the limited regexp. 
* It's really nice to not rely on backtracking and such. */ int sycklex( YYSTYPE *sycklval, SyckParser *parser ) { switch ( parser->input_type ) { case syck_yaml_utf8: return sycklex_yaml_utf8( sycklval, parser ); case syck_yaml_utf16: syckerror( "UTF-16 is not currently supported in Syck.\nPlease contribute code to help this happen!" ); break; case syck_yaml_utf32: syckerror( "UTF-32 is not currently supported in Syck.\nPlease contribute code to help this happen!" ); break; case syck_bytecode_utf8: return sycklex_bytecode_utf8( sycklval, parser ); } } /* * Parser for standard YAML [UTF-8] */ int sycklex_yaml_utf8( YYSTYPE *sycklval, SyckParser *parser ) { int doc_level = 0; syck_parser_ptr = parser; if ( YYCURSOR == NULL ) { syck_parser_read( parser ); } if ( parser->force_token != 0 ) { int t = parser->force_token; parser->force_token = 0; return t; } #line 312 "token.re" if ( YYLINEPTR != YYCURSOR ) { goto Document; } Header: YYTOKEN = YYCURSOR; #line 7 "" { YYCTYPE yych; unsigned int yyaccept; goto yy0; yy1: ++YYCURSOR; yy0: if((YYLIMIT - YYCURSOR) < 5) YYFILL(5); yych = *YYCURSOR; switch(yych){ case '\000': goto yy7; case '\n': goto yy9; case '\r': goto yy11; case ' ': goto yy12; case '#': goto yy5; case '-': goto yy2; case '.': goto yy4; default: goto yy14; } yy2: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case '-': goto yy28; default: goto yy3; } yy3: #line 371 "token.re" { YYPOS(0); goto Document; } #line 37 "" yy4: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case '.': goto yy21; default: goto yy3; } yy5: ++YYCURSOR; goto yy6; yy6: #line 353 "token.re" { eat_comments( parser ); goto Header; } #line 51 "" yy7: ++YYCURSOR; goto yy8; yy8: #line 357 "token.re" { SyckLevel *lvl = CURRENT_LEVEL(); ENSURE_YAML_IEND(lvl, -1); YYPOS(0); return 0; } #line 61 "" yy9: yyaccept = 1; yych = *(YYMARKER = ++YYCURSOR); goto yy18; yy10: #line 363 "token.re" { GOBBLE_UP_YAML_INDENT( doc_level, YYTOKEN ); goto Header; } #line 70 "" yy11: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy17; default: goto yy3; } yy12: ++YYCURSOR; yych = *YYCURSOR; goto yy16; yy13: #line 367 "token.re" { doc_level = YYCURSOR - YYLINEPTR; goto Header; } #line 83 "" yy14: yych = *++YYCURSOR; goto yy3; yy15: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy16; yy16: switch(yych){ case ' ': goto yy15; default: goto yy13; } yy17: yyaccept = 1; YYMARKER = ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy18; yy18: switch(yych){ case '\n': case ' ': goto yy17; case '\r': goto yy19; default: goto yy10; } yy19: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; switch(yych){ case '\n': goto yy17; default: goto yy20; } yy20: YYCURSOR = YYMARKER; switch(yyaccept){ case 1: goto yy10; case 0: goto yy3; } yy21: yych = *++YYCURSOR; switch(yych){ case '.': goto yy22; default: goto yy20; } yy22: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy23; case '\r': goto yy27; case ' ': goto yy25; default: goto yy20; } yy23: ++YYCURSOR; goto yy24; yy24: #line 339 "token.re" { SyckLevel *lvl = CURRENT_LEVEL(); if ( lvl->status == syck_lvl_header ) { goto Header; } else { ENSURE_YAML_IEND(lvl, -1); YYPOS(0); return 0; } return 0; } #line 147 "" yy25: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy26; yy26: switch(yych){ case ' ': goto yy25; default: goto yy24; } yy27: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy23; default: goto yy20; } yy28: yych = *++YYCURSOR; switch(yych){ case '-': goto yy29; default: goto yy20; 
} yy29: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy30; case '\r': goto yy34; case ' ': goto yy32; default: goto yy20; } yy30: ++YYCURSOR; goto yy31; yy31: #line 325 "token.re" { SyckLevel *lvl = CURRENT_LEVEL(); if ( lvl->status == syck_lvl_header ) { YYPOS(3); goto Directive; } else { ENSURE_YAML_IEND(lvl, -1); YYPOS(0); return 0; } } #line 191 "" yy32: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy33; yy33: switch(yych){ case ' ': goto yy32; default: goto yy31; } yy34: ++YYCURSOR; switch((yych = *YYCURSOR)) { case '\n': goto yy30; default: goto yy20; } } #line 375 "token.re" Document: { SyckLevel *lvl = CURRENT_LEVEL(); if ( lvl->status == syck_lvl_header ) { lvl->status = syck_lvl_doc; } YYTOKEN = YYCURSOR; #line 209 "" { YYCTYPE yych; unsigned int yyaccept; goto yy35; yy36: ++YYCURSOR; yy35: if((YYLIMIT - YYCURSOR) < 3) YYFILL(3); yych = *YYCURSOR; switch(yych){ case '\000': goto yy62; case '\n': goto yy37; case '\r': goto yy39; case ' ': goto yy60; case '!': goto yy51; case '"': goto yy55; case '#': goto yy58; case '&': goto yy49; case '\'': goto yy53; case '*': goto yy50; case ',': case ':': goto yy47; case '-': case '?': goto yy48; case '>': case '|': goto yy57; case '[': goto yy41; case ']': case '}': goto yy45; case '{': goto yy43; default: goto yy64; } yy37: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); goto yy92; yy38: #line 389 "token.re" { /* Isolate spaces */ int indt_len; GOBBLE_UP_YAML_INDENT( indt_len, YYTOKEN ); lvl = CURRENT_LEVEL(); doc_level = 0; /* XXX: Comment lookahead */ if ( *YYCURSOR == '#' ) { goto Document; } /* Ignore indentation inside inlines */ if ( lvl->status == syck_lvl_iseq || lvl->status == syck_lvl_imap ) { goto Document; } /* Check for open indent */ ENSURE_YAML_IEND(lvl, indt_len); ENSURE_YAML_IOPEN(lvl, indt_len, 0); if ( indt_len == -1 ) { return 0; } return YAML_INDENT; } #line 269 "" yy39: ++YYCURSOR; switch((yych = *YYCURSOR)) { case '\n': goto yy91; default: goto yy40; } yy40: #line 493 "token.re" { ENSURE_YAML_IOPEN(lvl, doc_level, 1); goto Plain; } #line 280 "" yy41: ++YYCURSOR; goto yy42; yy42: #line 417 "token.re" { ENSURE_YAML_IOPEN(lvl, doc_level, 1); lvl = CURRENT_LEVEL(); ADD_LEVEL(lvl->spaces + 1, syck_lvl_iseq); return YYTOKEN[0]; } #line 290 "" yy43: ++YYCURSOR; goto yy44; yy44: #line 423 "token.re" { ENSURE_YAML_IOPEN(lvl, doc_level, 1); lvl = CURRENT_LEVEL(); ADD_LEVEL(lvl->spaces + 1, syck_lvl_imap); return YYTOKEN[0]; } #line 300 "" yy45: ++YYCURSOR; goto yy46; yy46: #line 429 "token.re" { POP_LEVEL(); return YYTOKEN[0]; } #line 308 "" yy47: yyaccept = 1; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case '\n': goto yy86; case '\r': goto yy90; case ' ': goto yy88; default: goto yy40; } yy48: yyaccept = 1; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case '\n': goto yy81; case '\r': goto yy85; case ' ': goto yy83; default: goto yy40; } yy49: yych = *++YYCURSOR; switch(yych){ case '-': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 
'y': case 'z': goto yy78; default: goto yy40; } yy50: yych = *++YYCURSOR; switch(yych){ case '-': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy75; default: goto yy40; } yy51: ++YYCURSOR; goto yy52; yy52: #line 467 "token.re" { goto TransferMethod; } #line 458 "" yy53: ++YYCURSOR; goto yy54; yy54: #line 469 "token.re" { ENSURE_YAML_IOPEN(lvl, doc_level, 1); goto SingleQuote; } #line 465 "" yy55: ++YYCURSOR; goto yy56; yy56: #line 472 "token.re" { ENSURE_YAML_IOPEN(lvl, doc_level, 1); goto DoubleQuote; } #line 472 "" yy57: yyaccept = 1; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case '\n': goto yy70; case '\r': goto yy74; case ' ': goto yy72; case '+': case '-': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy67; default: goto yy40; } yy58: ++YYCURSOR; goto yy59; yy59: #line 482 "token.re" { eat_comments( parser ); goto Document; } #line 498 "" yy60: ++YYCURSOR; yych = *YYCURSOR; goto yy66; yy61: #line 486 "token.re" { goto Document; } #line 504 "" yy62: ++YYCURSOR; goto yy63; yy63: #line 488 "token.re" { ENSURE_YAML_IEND(lvl, -1); YYPOS(0); return 0; } #line 513 "" yy64: yych = *++YYCURSOR; goto yy40; yy65: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy66; yy66: switch(yych){ case ' ': goto yy65; default: goto yy61; } yy67: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy68; yy68: switch(yych){ case '\n': goto yy70; case '\r': goto yy74; case ' ': goto yy72; case '+': case '-': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': goto yy67; default: goto yy69; } yy69: YYCURSOR = YYMARKER; switch(yyaccept){ case 0: goto yy38; case 1: goto yy40; } yy70: ++YYCURSOR; goto yy71; yy71: #line 475 "token.re" { if ( is_newline( YYCURSOR - 1 ) ) { YYCURSOR--; } goto ScalarBlock; } #line 561 "" yy72: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy73; yy73: switch(yych){ case ' ': goto yy72; default: goto yy71; } yy74: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy70; default: goto yy69; } yy75: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy76; yy76: switch(yych){ case '-': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy75; default: goto yy77; } yy77: #line 462 "token.re" { ENSURE_YAML_IOPEN(lvl, doc_level, 1); sycklval->name = 
syck_strndup( YYTOKEN + 1, YYCURSOR - YYTOKEN - 1 ); return YAML_ALIAS; } #line 650 "" yy78: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy79; yy79: switch(yych){ case '-': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy78; default: goto yy80; } yy80: #line 451 "token.re" { sycklval->name = syck_strndup( YYTOKEN + 1, YYCURSOR - YYTOKEN - 1 ); /* * Remove previous anchors of the same name. Since the parser will likely * construct deeper nodes first, we want those nodes to be placed in the * queue for matching at a higher level of indentation. */ syck_hdlr_remove_anchor(parser, sycklval->name); return YAML_ANCHOR; } #line 731 "" yy81: ++YYCURSOR; goto yy82; yy82: #line 437 "token.re" { ENSURE_YAML_IOPEN(lvl, YYTOKEN - YYLINEPTR, 1); FORCE_NEXT_TOKEN(YAML_IOPEN); if ( *YYCURSOR == '#' || is_newline( YYCURSOR ) || is_newline( YYCURSOR - 1 ) ) { YYCURSOR--; ADD_LEVEL((YYTOKEN + 1) - YYLINEPTR, syck_lvl_doc); } else /* spaces followed by content uses the space as indentation */ { ADD_LEVEL(YYCURSOR - YYLINEPTR, syck_lvl_doc); } return YYTOKEN[0]; } #line 749 "" yy83: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy84; yy84: switch(yych){ case ' ': goto yy83; default: goto yy82; } yy85: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy81; default: goto yy69; } yy86: ++YYCURSOR; goto yy87; yy87: #line 433 "token.re" { YYPOS(1); return YYTOKEN[0]; } #line 771 "" yy88: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy89; yy89: switch(yych){ case ' ': goto yy88; default: goto yy87; } yy90: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy86; default: goto yy69; } yy91: yyaccept = 0; YYMARKER = ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy92; yy92: switch(yych){ case '\n': case ' ': goto yy91; case '\r': goto yy93; default: goto yy38; } yy93: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; switch(yych){ case '\n': goto yy91; default: goto yy69; } } #line 497 "token.re" } Directive: { YYTOKTMP = YYCURSOR; #line 807 "" { YYCTYPE yych; unsigned int yyaccept; goto yy94; yy95: ++YYCURSOR; yy94: if((YYLIMIT - YYCURSOR) < 2) YYFILL(2); yych = *YYCURSOR; switch(yych){ case '\000': goto yy96; case ' ': goto yy99; case '%': goto yy97; default: goto yy101; } yy96: YYCURSOR = YYMARKER; switch(yyaccept){ case 0: goto yy98; } yy97: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case '.': case '/': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case ':': case ';': case '<': case '=': case '>': case '?': case '@': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '[': case '\\': case ']': case '^': case '_': case 'a': case 'b': case 'c': 
case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy104; default: goto yy98; } yy98: #line 510 "token.re" { YYCURSOR = YYTOKTMP; return YAML_DOCSEP; } #line 911 "" yy99: ++YYCURSOR; yych = *YYCURSOR; goto yy103; yy100: #line 508 "token.re" { goto Directive; } #line 917 "" yy101: yych = *++YYCURSOR; goto yy98; yy102: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy103; yy103: switch(yych){ case ' ': goto yy102; default: goto yy100; } yy104: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy105; yy105: switch(yych){ case '.': case '/': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case ';': case '<': case '=': case '>': case '?': case '@': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '[': case '\\': case ']': case '^': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy104; case ':': goto yy106; default: goto yy96; } yy106: yych = *++YYCURSOR; switch(yych){ case '.': case '/': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case ':': case ';': case '<': case '=': case '>': case '?': case '@': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '[': case '\\': case ']': case '^': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy107; default: goto yy96; } yy107: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy108; yy108: switch(yych){ case '.': case '/': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case ':': case ';': case '<': case '=': case '>': case '?': case '@': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case '[': case '\\': case ']': case '^': case '_': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': goto yy107; default: goto yy109; } yy109: #line 506 "token.re" { goto Directive; } #line 1176 "" } #line 513 "token.re" } Plain: { int qidx = 0; int qcapa = 100; char *qstr = S_ALLOC_N( char, qcapa ); SyckLevel *plvl; int parentIndent; YYCURSOR = YYTOKEN; plvl = CURRENT_LEVEL(); 
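/* capture the enclosing level and its effective indent: the Plain scanner
   below ends the scalar once a line's indent drops to parentIndent or less */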
GET_TRUE_YAML_INDENT(parentIndent); Plain2: YYTOKEN = YYCURSOR; Plain3: #line 1180 "" { YYCTYPE yych; unsigned int yyaccept; goto yy110; yy111: ++YYCURSOR; yy110: if((YYLIMIT - YYCURSOR) < 3) YYFILL(3); yych = *YYCURSOR; switch(yych){ case '\000': goto yy124; case '\n': goto yy112; case '\r': goto yy114; case ' ': goto yy122; case ',': goto yy117; case ':': goto yy116; case ']': goto yy120; case '}': goto yy118; default: goto yy126; } yy112: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); goto yy141; yy113: #line 536 "token.re" { int indt_len, nl_count = 0; SyckLevel *lvl; char *tok = YYTOKEN; GOBBLE_UP_YAML_INDENT( indt_len, tok ); lvl = CURRENT_LEVEL(); if ( indt_len <= parentIndent ) { RETURN_IMPLICIT(); } while ( YYTOKEN < YYCURSOR ) { int nl_len = 0; if ( nl_len = newline_len( YYTOKEN++ ) ) { nl_count++; YYTOKEN += nl_len - 1; } } if ( nl_count <= 1 ) { QUOTECAT(qstr, qcapa, qidx, ' '); } else { int i; for ( i = 0; i < nl_count - 1; i++ ) { QUOTECAT(qstr, qcapa, qidx, '\n'); } } goto Plain2; } #line 1240 "" yy114: ++YYCURSOR; switch((yych = *YYCURSOR)) { case '\n': goto yy140; default: goto yy115; } yy115: #line 615 "token.re" { QUOTECATS(qstr, qcapa, qidx, YYTOKEN, YYCURSOR - YYTOKEN); goto Plain2; } #line 1251 "" yy116: yyaccept = 1; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case '\n': goto yy135; case '\r': goto yy139; case ' ': goto yy137; default: goto yy115; } yy117: yyaccept = 1; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case '\n': goto yy129; case '\r': goto yy133; case ' ': goto yy131; default: goto yy115; } yy118: ++YYCURSOR; goto yy119; yy119: #line 585 "token.re" { if ( plvl->status != syck_lvl_imap ) { PLAIN_NOT_INL(); } else { PLAIN_IS_INL(); } RETURN_IMPLICIT(); } #line 1282 "" yy120: ++YYCURSOR; goto yy121; yy121: #line 596 "token.re" { if ( plvl->status != syck_lvl_iseq ) { PLAIN_NOT_INL(); } else { PLAIN_IS_INL(); } RETURN_IMPLICIT(); } #line 1297 "" yy122: ++YYCURSOR; switch((yych = *YYCURSOR)) { case '#': goto yy127; default: goto yy123; } yy123: #line 613 "token.re" { goto Plain3; } #line 1306 "" yy124: ++YYCURSOR; goto yy125; yy125: #line 611 "token.re" { RETURN_IMPLICIT(); } #line 1312 "" yy126: yych = *++YYCURSOR; goto yy115; yy127: ++YYCURSOR; goto yy128; yy128: #line 607 "token.re" { eat_comments( parser ); RETURN_IMPLICIT(); } #line 1322 "" yy129: ++YYCURSOR; goto yy130; yy130: #line 574 "token.re" { if ( plvl->status != syck_lvl_iseq && plvl->status != syck_lvl_imap ) { PLAIN_NOT_INL(); } else { PLAIN_IS_INL(); } RETURN_IMPLICIT(); } #line 1337 "" yy131: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy132; yy132: switch(yych){ case ' ': goto yy131; default: goto yy130; } yy133: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy129; default: goto yy134; } yy134: YYCURSOR = YYMARKER; switch(yyaccept){ case 0: goto yy113; case 1: goto yy115; } yy135: ++YYCURSOR; goto yy136; yy136: #line 572 "token.re" { RETURN_IMPLICIT(); } #line 1362 "" yy137: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy138; yy138: switch(yych){ case ' ': goto yy137; default: goto yy136; } yy139: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy135; default: goto yy134; } yy140: yyaccept = 0; YYMARKER = ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy141; yy141: switch(yych){ case '\n': case ' ': goto yy140; case '\r': goto yy142; default: goto yy113; } yy142: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; switch(yych){ case '\n': goto yy140; default: goto yy134; } } #line 619 
"token.re" } SingleQuote: { int qidx = 0; int qcapa = 100; char *qstr = S_ALLOC_N( char, qcapa ); SingleQuote2: YYTOKEN = YYCURSOR; #line 1398 "" { YYCTYPE yych; unsigned int yyaccept; goto yy143; yy144: ++YYCURSOR; yy143: if((YYLIMIT - YYCURSOR) < 2) YYFILL(2); yych = *YYCURSOR; switch(yych){ case '\000': goto yy151; case '\n': goto yy145; case '\r': goto yy147; case '\'': goto yy149; default: goto yy152; } yy145: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); goto yy156; yy146: #line 633 "token.re" { int indt_len; int nl_count = 0; SyckLevel *lvl; GOBBLE_UP_YAML_INDENT( indt_len, YYTOKEN ); lvl = CURRENT_LEVEL(); if ( lvl->status != syck_lvl_str ) { ADD_LEVEL( indt_len, syck_lvl_str ); } else if ( indt_len < lvl->spaces ) { /* Error! */ } while ( YYTOKEN < YYCURSOR ) { int nl_len = 0; if ( nl_len = newline_len( YYTOKEN++ ) ) { nl_count++; YYTOKEN += nl_len - 1; } } if ( nl_count <= 1 ) { QUOTECAT(qstr, qcapa, qidx, ' '); } else { int i; for ( i = 0; i < nl_count - 1; i++ ) { QUOTECAT(qstr, qcapa, qidx, '\n'); } } goto SingleQuote2; } #line 1458 "" yy147: ++YYCURSOR; switch((yych = *YYCURSOR)) { case '\n': goto yy155; default: goto yy148; } yy148: #line 700 "token.re" { QUOTECAT(qstr, qcapa, qidx, *(YYCURSOR - 1)); goto SingleQuote2; } #line 1469 "" yy149: ++YYCURSOR; switch((yych = *YYCURSOR)) { case '\'': goto yy153; default: goto yy150; } yy150: #line 677 "token.re" { SyckLevel *lvl; SyckNode *n = syck_alloc_str(); lvl = CURRENT_LEVEL(); if ( lvl->status == syck_lvl_str ) { POP_LEVEL(); } if ( ((SyckParser *)parser)->taguri_expansion == 1 ) { n->type_id = syck_taguri( YAML_DOMAIN, "str", 3 ); } else { n->type_id = syck_strndup( "str", 3 ); } n->data.str->ptr = qstr; n->data.str->len = qidx; n->data.str->style = scalar_1quote; sycklval->nodeData = n; return YAML_PLAIN; } #line 1499 "" yy151: yych = *++YYCURSOR; goto yy150; yy152: yych = *++YYCURSOR; goto yy148; yy153: ++YYCURSOR; goto yy154; yy154: #line 673 "token.re" { QUOTECAT(qstr, qcapa, qidx, '\''); goto SingleQuote2; } #line 1511 "" yy155: yyaccept = 0; YYMARKER = ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy156; yy156: switch(yych){ case '\n': case ' ': goto yy155; case '\r': goto yy157; default: goto yy146; } yy157: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; switch(yych){ case '\n': goto yy155; default: goto yy158; } yy158: YYCURSOR = YYMARKER; switch(yyaccept){ case 0: goto yy146; } } #line 704 "token.re" } DoubleQuote: { int keep_nl = 1; int qidx = 0; int qcapa = 100; char *qstr = S_ALLOC_N( char, qcapa ); DoubleQuote2: YYTOKEN = YYCURSOR; #line 1537 "" { YYCTYPE yych; unsigned int yyaccept; goto yy159; yy160: ++YYCURSOR; yy159: if((YYLIMIT - YYCURSOR) < 4) YYFILL(4); yych = *YYCURSOR; switch(yych){ case '\000': goto yy166; case '\n': goto yy161; case '\r': goto yy163; case '"': goto yy168; case '\\': goto yy165; default: goto yy169; } yy161: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); goto yy183; yy162: #line 722 "token.re" { int indt_len; int nl_count = 0; SyckLevel *lvl; GOBBLE_UP_YAML_INDENT( indt_len, YYTOKEN ); lvl = CURRENT_LEVEL(); if ( lvl->status != syck_lvl_str ) { ADD_LEVEL( indt_len, syck_lvl_str ); } else if ( indt_len < lvl->spaces ) { /* FIXME */ } if ( keep_nl == 1 ) { while ( YYTOKEN < YYCURSOR ) { int nl_len = 0; if ( nl_len = newline_len( YYTOKEN++ ) ) { nl_count++; YYTOKEN += nl_len - 1; } } if ( nl_count <= 1 ) { QUOTECAT(qstr, qcapa, qidx, ' '); } else { int i; for ( i = 0; i < nl_count - 1; i++ ) { QUOTECAT(qstr, qcapa, qidx, '\n'); } } } keep_nl = 1; 
goto DoubleQuote2; } #line 1602 "" yy163: ++YYCURSOR; switch((yych = *YYCURSOR)) { case '\n': goto yy182; default: goto yy164; } yy164: #line 808 "token.re" { QUOTECAT(qstr, qcapa, qidx, *(YYCURSOR - 1)); goto DoubleQuote2; } #line 1613 "" yy165: yyaccept = 1; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case '\n': goto yy173; case '\r': goto yy175; case ' ': goto yy170; case '"': case '0': case '\\': case 'a': case 'b': case 'e': case 'f': case 'n': case 'r': case 't': case 'v': goto yy177; case 'x': goto yy176; default: goto yy164; } yy166: ++YYCURSOR; goto yy167; yy167: #line 785 "token.re" { SyckLevel *lvl; SyckNode *n = syck_alloc_str(); lvl = CURRENT_LEVEL(); if ( lvl->status == syck_lvl_str ) { POP_LEVEL(); } if ( ((SyckParser *)parser)->taguri_expansion == 1 ) { n->type_id = syck_taguri( YAML_DOMAIN, "str", 3 ); } else { n->type_id = syck_strndup( "str", 3 ); } n->data.str->ptr = qstr; n->data.str->len = qidx; n->data.str->style = scalar_2quote; sycklval->nodeData = n; return YAML_PLAIN; } #line 1652 "" yy168: yych = *++YYCURSOR; goto yy167; yy169: yych = *++YYCURSOR; goto yy164; yy170: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy171; yy171: switch(yych){ case '\n': goto yy173; case '\r': goto yy175; case ' ': goto yy170; default: goto yy172; } yy172: YYCURSOR = YYMARKER; switch(yyaccept){ case 0: goto yy162; case 1: goto yy164; } yy173: ++YYCURSOR; goto yy174; yy174: #line 780 "token.re" { keep_nl = 0; YYCURSOR--; goto DoubleQuote2; } #line 1681 "" yy175: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy173; default: goto yy172; } yy176: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': goto yy179; default: goto yy172; } yy177: ++YYCURSOR; goto yy178; yy178: #line 766 "token.re" { char ch = *( YYCURSOR - 1 ); QUOTECAT(qstr, qcapa, qidx, escape_seq( ch )); goto DoubleQuote2; } #line 1719 "" yy179: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': goto yy180; default: goto yy172; } yy180: ++YYCURSOR; goto yy181; yy181: #line 771 "token.re" { long ch; char *chr_text = syck_strndup( YYTOKEN, 4 ); chr_text[0] = '0'; ch = strtol( chr_text, NULL, 16 ); free( chr_text ); QUOTECAT(qstr, qcapa, qidx, ch); goto DoubleQuote2; } #line 1756 "" yy182: yyaccept = 0; YYMARKER = ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy183; yy183: switch(yych){ case '\n': case ' ': goto yy182; case '\r': goto yy184; default: goto yy162; } yy184: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; switch(yych){ case '\n': goto yy182; default: goto yy172; } } #line 812 "token.re" } TransferMethod: { int qidx = 0; int qcapa = 100; char *qstr = S_ALLOC_N( char, qcapa ); TransferMethod2: YYTOKTMP = YYCURSOR; #line 1778 "" { YYCTYPE yych; unsigned int yyaccept; goto yy185; yy186: ++YYCURSOR; yy185: if((YYLIMIT - YYCURSOR) < 4) YYFILL(4); yych = *YYCURSOR; switch(yych){ case '\000': goto yy187; case '\n': goto yy189; case '\r': goto yy191; case ' ': goto yy190; case '\\': goto yy193; default: goto yy194; } yy187: ++YYCURSOR; goto yy188; yy188: #line 826 "token.re" { SyckLevel *lvl; YYCURSOR = YYTOKTMP; if ( YYCURSOR == YYTOKEN + 1 ) { free( qstr 
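/* Illustrative note (editorial, informal) on the \xNN escapes handled above
 * in DoubleQuote and again below in TransferMethod: the handler copies the
 * four characters of the escape, e.g. "\x41", overwrites the backslash so the
 * buffer reads "0x41", and lets strtol(..., NULL, 16) produce the byte 0x41
 * ('A'), which QUOTECAT then appends to the scalar being built.
 */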
); return YAML_ITRANSFER; } lvl = CURRENT_LEVEL(); /* * URL Prefixing */ if ( *qstr == '^' ) { sycklval->name = S_ALLOC_N( char, qidx + strlen( lvl->domain ) ); sycklval->name[0] = '\0'; strcat( sycklval->name, lvl->domain ); strncat( sycklval->name, qstr + 1, qidx - 1 ); free( qstr ); } else { char *carat = qstr; char *qend = qstr + qidx; while ( (++carat) < qend ) { if ( *carat == '^' ) break; } if ( carat < qend ) { free( lvl->domain ); lvl->domain = syck_strndup( qstr, carat - qstr ); sycklval->name = S_ALLOC_N( char, ( qend - carat ) + strlen( lvl->domain ) ); sycklval->name[0] = '\0'; strcat( sycklval->name, lvl->domain ); strncat( sycklval->name, carat + 1, ( qend - carat ) - 1 ); free( qstr ); } else { sycklval->name = qstr; } } return YAML_TRANSFER; } #line 1848 "" yy189: yych = *++YYCURSOR; goto yy188; yy190: yych = *++YYCURSOR; goto yy203; yy191: ++YYCURSOR; switch((yych = *YYCURSOR)) { case '\n': goto yy187; default: goto yy192; } yy192: #line 893 "token.re" { QUOTECAT(qstr, qcapa, qidx, *(YYCURSOR - 1)); goto TransferMethod2; } #line 1863 "" yy193: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case '"': case '0': case '\\': case 'a': case 'b': case 'e': case 'f': case 'n': case 'r': case 't': case 'v': goto yy197; case 'x': goto yy195; default: goto yy192; } yy194: yych = *++YYCURSOR; goto yy192; yy195: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': goto yy199; default: goto yy196; } yy196: YYCURSOR = YYMARKER; switch(yyaccept){ case 0: goto yy192; } yy197: ++YYCURSOR; goto yy198; yy198: #line 879 "token.re" { char ch = *( YYCURSOR - 1 ); QUOTECAT(qstr, qcapa, qidx, escape_seq( ch )); goto TransferMethod2; } #line 1911 "" yy199: yych = *++YYCURSOR; switch(yych){ case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': goto yy200; default: goto yy196; } yy200: ++YYCURSOR; goto yy201; yy201: #line 884 "token.re" { long ch; char *chr_text = syck_strndup( YYTOKTMP, 4 ); chr_text[0] = '0'; ch = strtol( chr_text, NULL, 16 ); free( chr_text ); QUOTECAT(qstr, qcapa, qidx, ch); goto TransferMethod2; } #line 1948 "" yy202: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy203; yy203: switch(yych){ case ' ': goto yy202; default: goto yy188; } } #line 898 "token.re" } ScalarBlock: { int qidx = 0; int qcapa = 100; char *qstr = S_ALLOC_N( char, qcapa ); int blockType = 0; int nlDoWhat = 0; int lastIndent = 0; int forceIndent = -1; char *yyt = YYTOKEN; SyckLevel *lvl = CURRENT_LEVEL(); int parentIndent; GET_TRUE_YAML_INDENT(parentIndent); switch ( *yyt ) { case '|': blockType = BLOCK_LIT; break; case '>': blockType = BLOCK_FOLD; break; } while ( ++yyt <= YYCURSOR ) { if ( *yyt == '-' ) { nlDoWhat = NL_CHOMP; } else if ( *yyt == '+' ) { nlDoWhat = NL_KEEP; } else if ( isdigit( *yyt ) ) { forceIndent = strtol( yyt, NULL, 10 ) + parentIndent; } } qstr[0] = '\0'; YYTOKEN = YYCURSOR; ScalarBlock2: YYTOKEN = YYCURSOR; #line 1961 "" { YYCTYPE yych; unsigned int yyaccept; goto yy204; yy205: ++YYCURSOR; yy204: if((YYLIMIT - YYCURSOR) < 5) YYFILL(5); yych = *YYCURSOR; switch(yych){ case '\000': goto yy212; case '\n': goto yy206; case '\r': goto yy208; case '#': goto yy210; case '-': goto yy214; 
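/* Illustrative note (editorial, informal reading of the header scan at the
 * top of ScalarBlock): for a block scalar introduced by ">-2" the loop over
 * the header sets blockType = BLOCK_FOLD (from '>'), nlDoWhat = NL_CHOMP
 * (from '-') and forceIndent = 2 + parentIndent (from the digit).  A bare "|"
 * only sets blockType = BLOCK_LIT and leaves forceIndent at -1, so the
 * indentation of the first content line is used instead (see new_spaces in
 * the action below).
 */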
default: goto yy215; } yy206: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); goto yy225; yy207: #line 945 "token.re" { char *pacer; char *tok = YYTOKEN; int indt_len = 0, nl_count = 0, fold_nl = 0, nl_begin = 0; GOBBLE_UP_YAML_INDENT( indt_len, tok ); lvl = CURRENT_LEVEL(); if ( indt_len > parentIndent && lvl->status != syck_lvl_block ) { int new_spaces = forceIndent > 0 ? forceIndent : indt_len; ADD_LEVEL( new_spaces, syck_lvl_block ); lastIndent = indt_len - new_spaces; nl_begin = 1; lvl = CURRENT_LEVEL(); } else if ( lvl->status != syck_lvl_block ) { YYCURSOR = YYTOKEN; RETURN_YAML_BLOCK(); } /* * Fold only in the event of two lines being on the leftmost * indentation. */ if ( blockType == BLOCK_FOLD && lastIndent == 0 && ( indt_len - lvl->spaces ) == 0 ) { fold_nl = 1; } pacer = YYTOKEN; while ( pacer < YYCURSOR ) { int nl_len = 0; if ( nl_len = newline_len( pacer++ ) ) { nl_count++; pacer += nl_len - 1; } } if ( fold_nl == 1 || nl_begin == 1 ) { nl_count--; } if ( nl_count < 1 && nl_begin == 0 ) { QUOTECAT(qstr, qcapa, qidx, ' '); } else { int i; for ( i = 0; i < nl_count; i++ ) { QUOTECAT(qstr, qcapa, qidx, '\n'); } } lastIndent = indt_len - lvl->spaces; YYCURSOR -= lastIndent; if ( indt_len < lvl->spaces ) { POP_LEVEL(); YYCURSOR = YYTOKEN; RETURN_YAML_BLOCK(); } goto ScalarBlock2; } #line 2052 "" yy208: ++YYCURSOR; switch((yych = *YYCURSOR)) { case '\n': goto yy224; default: goto yy209; } yy209: #line 1054 "token.re" { QUOTECAT(qstr, qcapa, qidx, *YYTOKEN); goto ScalarBlock2; } #line 2063 "" yy210: ++YYCURSOR; goto yy211; yy211: #line 1016 "token.re" { lvl = CURRENT_LEVEL(); if ( lvl->status != syck_lvl_block ) { eat_comments( parser ); YYTOKEN = YYCURSOR; } else { QUOTECAT(qstr, qcapa, qidx, *YYTOKEN); } goto ScalarBlock2; } #line 2080 "" yy212: ++YYCURSOR; goto yy213; yy213: #line 1030 "token.re" { YYCURSOR--; POP_LEVEL(); RETURN_YAML_BLOCK(); } #line 2089 "" yy214: yyaccept = 1; yych = *(YYMARKER = ++YYCURSOR); switch(yych){ case '-': goto yy216; default: goto yy209; } yy215: yych = *++YYCURSOR; goto yy209; yy216: yych = *++YYCURSOR; switch(yych){ case '-': goto yy218; default: goto yy217; } yy217: YYCURSOR = YYMARKER; switch(yyaccept){ case 0: goto yy207; case 1: goto yy209; } yy218: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy219; case '\r': goto yy223; case ' ': goto yy221; default: goto yy217; } yy219: ++YYCURSOR; goto yy220; yy220: #line 1035 "token.re" { if ( YYTOKEN == YYLINEPTR ) { if ( blockType == BLOCK_FOLD && qidx > 0 ) { qidx -= 1; } QUOTECAT(qstr, qcapa, qidx, '\n'); POP_LEVEL(); YYCURSOR = YYTOKEN; RETURN_YAML_BLOCK(); } else { QUOTECAT(qstr, qcapa, qidx, *YYTOKEN); YYCURSOR = YYTOKEN + 1; goto ScalarBlock2; } } #line 2137 "" yy221: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy222; yy222: switch(yych){ case ' ': goto yy221; default: goto yy220; } yy223: yych = *++YYCURSOR; switch(yych){ case '\n': goto yy219; default: goto yy217; } yy224: yyaccept = 0; YYMARKER = ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy225; yy225: switch(yych){ case '\n': case ' ': goto yy224; case '\r': goto yy226; default: goto yy207; } yy226: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; switch(yych){ case '\n': goto yy224; default: goto yy217; } } #line 1059 "token.re" } return 0; } void eat_comments( SyckParser *parser ) { Comment: { YYTOKEN = YYCURSOR; #line 2173 "" { YYCTYPE yych; unsigned int yyaccept; goto yy227; yy228: ++YYCURSOR; yy227: if((YYLIMIT - YYCURSOR) < 2) YYFILL(2); yych = *YYCURSOR; 
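/* Illustrative note (editorial, informal): eat_comments() scans to the end of
 * the current line and returns with the cursor left at the terminating
 * newline.  The '#' rule in ScalarBlock above only calls it when the current
 * level is not a block scalar; inside a block scalar the '#' is appended via
 * QUOTECAT, so in
 *
 *     |
 *       value # not a comment
 *
 * the text "# not a comment" is kept as part of the scalar.
 */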
switch(yych){ case '\000': goto yy229; case '\n': goto yy231; case '\r': goto yy232; default: goto yy234; } yy229: ++YYCURSOR; goto yy230; yy230: #line 1075 "token.re" { YYCURSOR = YYTOKEN; return; } #line 2195 "" yy231: yyaccept = 0; yych = *(YYMARKER = ++YYCURSOR); goto yy236; yy232: ++YYCURSOR; switch((yych = *YYCURSOR)) { case '\n': goto yy235; default: goto yy233; } yy233: #line 1079 "token.re" { goto Comment; } #line 2208 "" yy234: yych = *++YYCURSOR; goto yy233; yy235: yyaccept = 0; YYMARKER = ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; goto yy236; yy236: switch(yych){ case '\n': goto yy235; case '\r': goto yy237; default: goto yy230; } yy237: ++YYCURSOR; if(YYLIMIT == YYCURSOR) YYFILL(1); yych = *YYCURSOR; switch(yych){ case '\n': goto yy235; default: goto yy238; } yy238: YYCURSOR = YYMARKER; switch(yyaccept){ case 0: goto yy230; } } #line 1082 "token.re" } } char escape_seq( char ch ) { switch ( ch ) { case '0': return '\0'; case 'a': return 7; case 'b': return '\010'; case 'e': return '\033'; case 'f': return '\014'; case 'n': return '\n'; case 'r': return '\015'; case 't': return '\t'; case 'v': return '\013'; default: return ch; } } int is_newline( char *ptr ) { return newline_len( ptr ); } int newline_len( char *ptr ) { if ( *ptr == '\n' ) return 1; if ( *ptr == '\r' && *( ptr + 1 ) == '\n' ) return 2; return 0; } int syckwrap() { return 1; } void syckerror( char *msg ) { if ( syck_parser_ptr->error_handler == NULL ) syck_parser_ptr->error_handler = syck_default_error_handler; syck_parser_ptr->root = syck_parser_ptr->root_on_error; (syck_parser_ptr->error_handler)(syck_parser_ptr, msg); } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/lib/handler.c0000644000000000000000000000650311672453175022415 0ustar rootroot/* * handler.c * * $Author: why $ * $Date: 2005/01/08 21:44:00 $ * * Copyright (C) 2003 why the lucky stiff */ #include "syck.h" SYMID syck_hdlr_add_node( SyckParser *p, SyckNode *n ) { SYMID id; if ( ! n->id ) { n->id = (p->handler)( p, n ); } id = n->id; if ( n->anchor == NULL ) { syck_free_node( n ); } return id; } SyckNode * syck_hdlr_add_anchor( SyckParser *p, char *a, SyckNode *n ) { char *atmp = NULL; SyckNode *ntmp = NULL; n->anchor = a; if ( p->bad_anchors != NULL ) { SyckNode *bad; if ( st_lookup( p->bad_anchors, (st_data_t)a, (st_data_t *)&bad ) ) { if ( n->kind != syck_str_kind ) { n->id = bad->id; (p->handler)( p, n ); } } } if ( p->anchors == NULL ) { p->anchors = st_init_strtable(); } if ( st_lookup( p->anchors, (st_data_t)a, (st_data_t *)&ntmp ) ) { if ( ntmp != (void *)1 ) { syck_free_node( ntmp ); } } st_insert( p->anchors, (st_data_t)a, (st_data_t)n ); return n; } void syck_hdlr_remove_anchor( SyckParser *p, char *a ) { char *atmp = a; SyckNode *ntmp; if ( p->anchors == NULL ) { p->anchors = st_init_strtable(); } if ( st_delete( p->anchors, (st_data_t *)&atmp, (st_data_t *)&ntmp ) ) { if ( ntmp != (void *)1 ) { syck_free_node( ntmp ); } } st_insert( p->anchors, (st_data_t)a, (st_data_t)1 ); } SyckNode * syck_hdlr_get_anchor( SyckParser *p, char *a ) { SyckNode *n = NULL; if ( p->anchors != NULL ) { if ( st_lookup( p->anchors, (st_data_t)a, (st_data_t *)&n ) ) { if ( n != (void *)1 ) { S_FREE( a ); return n; } else { if ( p->bad_anchors == NULL ) { p->bad_anchors = st_init_strtable(); } if ( ! 
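/* Illustrative note (editorial, informal): for input such as "- &a 1\n- *a"
 * the grammar calls syck_hdlr_add_anchor() when "&a" is reduced, filing the
 * node in p->anchors under "a"; the later alias "*a" arrives here in
 * syck_hdlr_get_anchor(), which normally just returns that node.  The
 * bad_anchors table consulted below appears to cover aliases whose anchor is
 * not currently available (e.g. a self- or forward-reference), in which case
 * the parser's bad_anchor_handler supplies the node instead.
 */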
st_lookup( p->bad_anchors, (st_data_t)a, (st_data_t *)&n ) ) { n = (p->bad_anchor_handler)( p, a ); st_insert( p->bad_anchors, (st_data_t)a, (st_data_t)n ); } } } } if ( n == NULL ) { n = (p->bad_anchor_handler)( p, a ); } if ( n->anchor ) { S_FREE( a ); } else { n->anchor = a; } return n; } void syck_add_transfer( char *uri, SyckNode *n, int taguri ) { if ( n->type_id != NULL ) { S_FREE( n->type_id ); } if ( taguri == 0 ) { n->type_id = uri; return; } n->type_id = syck_type_id_to_uri( uri ); S_FREE( uri ); } char * syck_xprivate( char *type_id, int type_len ) { char *uri = S_ALLOC_N( char, type_len + 14 ); uri[0] = '\0'; strcat( uri, "x-private:" ); strncat( uri, type_id, type_len ); return uri; } char * syck_taguri( char *domain, char *type_id, int type_len ) { char *uri = S_ALLOC_N( char, strlen( domain ) + type_len + 14 ); uri[0] = '\0'; strcat( uri, "tag:" ); strcat( uri, domain ); strcat( uri, ":" ); strncat( uri, type_id, type_len ); return uri; } int syck_try_implicit( SyckNode *n ) { return 1; } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/lib/bytecode.re0000644000000000000000000003002011672453175022751 0ustar rootroot/* * bytecode.re * * $Author: why $ * $Date: 2005/04/13 06:27:54 $ * * Copyright (C) 2003 why the lucky stiff */ #include "syck.h" #include "gram.h" #define QUOTELEN 128 /* * They do my bidding... */ #define YYCTYPE char #define YYCURSOR parser->cursor #define YYMARKER parser->marker #define YYLIMIT parser->limit #define YYTOKEN parser->token #define YYTOKTMP parser->toktmp #define YYLINEPTR parser->lineptr #define YYLINECTPTR parser->linectptr #define YYLINE parser->linect #define YYFILL(n) syck_parser_read(parser) extern SyckParser *syck_parser_ptr; char *get_inline( SyckParser *parser ); /* * Repositions the cursor at `n' offset from the token start. * Only works in `Header' and `Document' sections. */ #define YYPOS(n) YYCURSOR = YYTOKEN + n /* * Track line numbers */ #define CHK_NL(ptr) if ( *( ptr - 1 ) == '\n' && ptr > YYLINECTPTR ) { YYLINEPTR = ptr; YYLINE++; YYLINECTPTR = YYLINEPTR; } /* * I like seeing the level operations as macros... */ #define ADD_LEVEL(len, status) syck_parser_add_level( parser, len, status ) #define POP_LEVEL() syck_parser_pop_level( parser ) #define CURRENT_LEVEL() syck_parser_current_level( parser ) /* * Force a token next time around sycklex() */ #define FORCE_NEXT_TOKEN(tok) parser->force_token = tok; /* * Adding levels in bytecode requires us to make sure * we've got all our tokens worked out. */ #define ADD_BYTE_LEVEL(lvl, len, s ) \ switch ( lvl->status ) \ { \ case syck_lvl_seq: \ lvl->ncount++; \ ADD_LEVEL(len, syck_lvl_open); \ YYPOS(0); \ return '-'; \ \ case syck_lvl_map: \ lvl->ncount++; \ ADD_LEVEL(len, s); \ break; \ \ case syck_lvl_open: \ lvl->status = s; \ break; \ \ default: \ ADD_LEVEL(len, s); \ break; \ } /* * Nice little macro to ensure we're YAML_IOPENed to the current level. * * Only use this macro in the "Document" section * */ #define ENSURE_YAML_IOPEN(last_lvl, lvl_type, to_len, reset) \ if ( last_lvl->spaces < to_len ) \ { \ if ( last_lvl->status == syck_lvl_iseq || last_lvl->status == syck_lvl_imap ) \ { \ goto Document; \ } \ else \ { \ ADD_LEVEL( to_len, lvl_type ); \ if ( reset == 1 ) YYPOS(0); \ return YAML_IOPEN; \ } \ } /* * Nice little macro to ensure closure of levels. 
* * Only use this macro in the "Document" section * */ #define ENSURE_YAML_IEND(last_lvl, to_len) \ if ( last_lvl->spaces > to_len ) \ { \ syck_parser_pop_level( parser ); \ YYPOS(0); \ return YAML_IEND; \ } /* * Concatenates string items and manages allocation * to the string */ #define CAT(s, c, i, l) \ { \ if ( i + 1 >= c ) \ { \ c += QUOTELEN; \ S_REALLOC_N( s, char, c ); \ } \ s[i++] = l; \ s[i] = '\0'; \ } /* * Parser for standard YAML Bytecode [UTF-8] */ int sycklex_bytecode_utf8( YYSTYPE *sycklval, SyckParser *parser ) { SyckLevel *lvl; int doc_level = 0; syck_parser_ptr = parser; if ( YYCURSOR == NULL ) { syck_parser_read( parser ); } if ( parser->force_token != 0 ) { int t = parser->force_token; parser->force_token = 0; return t; } /*!re2c LF = ( "\n" | "\r\n" ) ; NULL = [\000] ; ANY = [\001-\377] ; YWORDC = [A-Za-z0-9_-] ; YWORDP = [A-Za-z0-9_-\.] ; DOC = "D" LF ; DIR = "V" YWORDP+ ":" YWORDP+ LF ; PAU = "P" LF ; MAP = "M" LF ; SEQ = "Q" LF ; END = "E" LF ; SCA = "S" ; SCC = "C" ; NNL = "N" [0-9]*; NLZ = "Z" ; ANC = "A" ; REF = "R" ; TAG = "T" ; COM = "c" ; */ lvl = CURRENT_LEVEL(); if ( lvl->status == syck_lvl_doc ) { goto Document; } Header: YYTOKEN = YYCURSOR; /*!re2c DOC { if ( lvl->status == syck_lvl_header ) { CHK_NL(YYCURSOR); goto Directive; } else { ENSURE_YAML_IEND(lvl, -1); YYPOS(0); return 0; } } ANY { YYPOS(0); goto Document; } */ Document: { lvl = CURRENT_LEVEL(); if ( lvl->status == syck_lvl_header ) { lvl->status = syck_lvl_doc; } YYTOKEN = YYCURSOR; /*!re2c DOC | PAU { ENSURE_YAML_IEND(lvl, -1); YYPOS(0); return 0; } MAP { int complex = 0; if ( lvl->ncount % 2 == 0 && ( lvl->status == syck_lvl_map || lvl->status == syck_lvl_seq ) ) { complex = 1; } ADD_BYTE_LEVEL(lvl, lvl->spaces + 1, syck_lvl_map); CHK_NL(YYCURSOR); if ( complex ) { FORCE_NEXT_TOKEN( YAML_IOPEN ); return '?'; } return YAML_IOPEN; } SEQ { int complex = 0; if ( lvl->ncount % 2 == 0 && ( lvl->status == syck_lvl_map || lvl->status == syck_lvl_seq ) ) { complex = 1; } ADD_BYTE_LEVEL(lvl, lvl->spaces + 1, syck_lvl_seq); CHK_NL(YYCURSOR); if ( complex ) { FORCE_NEXT_TOKEN( YAML_IOPEN ); return '?'; } return YAML_IOPEN; } END { if ( lvl->status == syck_lvl_seq && lvl->ncount == 0 ) { lvl->ncount++; YYPOS(0); FORCE_NEXT_TOKEN( ']' ); return '['; } else if ( lvl->status == syck_lvl_map && lvl->ncount == 0 ) { lvl->ncount++; YYPOS(0); FORCE_NEXT_TOKEN( '}' ); return '{'; } POP_LEVEL(); lvl = CURRENT_LEVEL(); if ( lvl->status == syck_lvl_seq ) { FORCE_NEXT_TOKEN(YAML_INDENT); } else if ( lvl->status == syck_lvl_map ) { if ( lvl->ncount % 2 == 1 ) { FORCE_NEXT_TOKEN(':'); } else { FORCE_NEXT_TOKEN(YAML_INDENT); } } CHK_NL(YYCURSOR); return YAML_IEND; } SCA { ADD_BYTE_LEVEL(lvl, lvl->spaces + 1, syck_lvl_str); goto Scalar; } ANC { ADD_BYTE_LEVEL(lvl, lvl->spaces + 1, syck_lvl_open); sycklval->name = get_inline( parser ); syck_hdlr_remove_anchor( parser, sycklval->name ); CHK_NL(YYCURSOR); return YAML_ANCHOR; } REF { ADD_BYTE_LEVEL(lvl, lvl->spaces + 1, syck_lvl_str); sycklval->name = get_inline( parser ); POP_LEVEL(); if ( *( YYCURSOR - 1 ) == '\n' ) YYCURSOR--; return YAML_ALIAS; } TAG { char *qstr; ADD_BYTE_LEVEL(lvl, lvl->spaces + 1, syck_lvl_open); qstr = get_inline( parser ); CHK_NL(YYCURSOR); if ( qstr[0] == '!' 
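/* Illustrative note (editorial, informal) on the bytecode instructions
 * defined above: each instruction is a single letter at the start of a line,
 * so the stream
 *
 *     D
 *     M
 *     Skey
 *     Svalue
 *     E
 *
 * is one document ("D") holding a mapping ("M" ... "E") in which the scalar
 * "key" maps to the scalar "value".  The TAG ("T") payload being examined
 * here is expanded below: a leading '!' marks a shorthand tag, and a '^' in
 * the payload updates and reuses the per-level lvl->domain prefix (see the
 * "URL Prefixing" comment that follows).
 */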
) { int qidx = strlen( qstr ); if ( qstr[1] == '\0' ) { free( qstr ); return YAML_ITRANSFER; } lvl = CURRENT_LEVEL(); /* * URL Prefixing */ if ( qstr[1] == '^' ) { sycklval->name = S_ALLOC_N( char, qidx + strlen( lvl->domain ) ); sycklval->name[0] = '\0'; strcat( sycklval->name, lvl->domain ); strncat( sycklval->name, qstr + 2, qidx - 2 ); free( qstr ); } else { char *carat = qstr + 1; char *qend = qstr + qidx; while ( (++carat) < qend ) { if ( *carat == '^' ) break; } if ( carat < qend ) { free( lvl->domain ); lvl->domain = syck_strndup( qstr + 1, carat - ( qstr + 1 ) ); sycklval->name = S_ALLOC_N( char, ( qend - carat ) + strlen( lvl->domain ) ); sycklval->name[0] = '\0'; strcat( sycklval->name, lvl->domain ); strncat( sycklval->name, carat + 1, ( qend - carat ) - 1 ); free( qstr ); } else { sycklval->name = S_ALLOC_N( char, strlen( qstr ) ); sycklval->name[0] = '\0'; S_MEMCPY( sycklval->name, qstr + 1, char, strlen( qstr ) ); free( qstr ); } } return YAML_TRANSFER; } sycklval->name = qstr; return YAML_TAGURI; } COM { goto Comment; } LF { CHK_NL(YYCURSOR); if ( lvl->status == syck_lvl_seq ) { return YAML_INDENT; } else if ( lvl->status == syck_lvl_map ) { if ( lvl->ncount % 2 == 1 ) return ':'; else return YAML_INDENT; } goto Document; } NULL { ENSURE_YAML_IEND(lvl, -1); YYPOS(0); return 0; } */ } Directive: { YYTOKEN = YYCURSOR; /*!re2c DIR { CHK_NL(YYCURSOR); goto Directive; } ANY { YYCURSOR = YYTOKEN; return YAML_DOCSEP; } */ } Comment: { YYTOKEN = YYCURSOR; /*!re2c LF { CHK_NL(YYCURSOR); goto Document; } ANY { goto Comment; } */ } Scalar: { int idx = 0; int cap = 100; char *str = S_ALLOC_N( char, cap ); char *tok; str[0] = '\0'; Scalar2: tok = YYCURSOR; /*!re2c LF SCC { CHK_NL(tok+1); goto Scalar2; } LF NNL { CHK_NL(tok+1); if ( tok + 2 < YYCURSOR ) { char *count = tok + 2; int total = strtod( count, NULL ); int i; for ( i = 0; i < total; i++ ) { CAT(str, cap, idx, '\n'); } } else { CAT(str, cap, idx, '\n'); } goto Scalar2; } LF NLZ { CHK_NL(tok+1); CAT(str, cap, idx, '\0'); goto Scalar2; } LF { YYCURSOR = tok; goto ScalarEnd; } NULL { YYCURSOR = tok; goto ScalarEnd; } ANY { CAT(str, cap, idx, tok[0]); goto Scalar2; } */ ScalarEnd: { SyckNode *n = syck_alloc_str(); n->data.str->ptr = str; n->data.str->len = idx; sycklval->nodeData = n; POP_LEVEL(); if ( parser->implicit_typing == 1 ) { try_tag_implicit( sycklval->nodeData, parser->taguri_expansion ); } return YAML_PLAIN; } } } char * get_inline( SyckParser *parser ) { int idx = 0; int cap = 100; char *str = S_ALLOC_N( char, cap ); char *tok; str[0] = '\0'; Inline: { tok = YYCURSOR; /*!re2c LF { CHK_NL(YYCURSOR); return str; } NULL { YYCURSOR = tok; return str; } ANY { CAT(str, cap, idx, tok[0]); goto Inline; } */ } } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/lib/gram.c0000644000000000000000000015656311672453175021742 0ustar rootroot/* A Bison parser, made by GNU Bison 1.875d. */ /* Skeleton parser for Yacc-like parsing with Bison, Copyright (C) 1984, 1989, 1990, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */ /* As a special exception, when this file is copied by Bison into a Bison output file, you may use that output file without restriction. This special exception was added by the Free Software Foundation in version 1.24 of Bison. */ /* Written by Richard Stallman by simplifying the original so called ``semantic'' parser. */ /* All symbols defined below should begin with yy or YY, to avoid infringing on user name space. This should be done even for local variables, as they might otherwise be expanded by user macros. There are some unavoidable exceptions within include files to define necessary library symbols; they are noted "INFRINGES ON USER NAME SPACE" below. */ /* Identify Bison output. */ #define YYBISON 1 /* Skeleton name. */ #define YYSKELETON_NAME "yacc.c" /* Pure parsers. */ #define YYPURE 1 /* Using locations. */ #define YYLSP_NEEDED 0 /* If NAME_PREFIX is specified substitute the variables and functions names. */ #define yyparse syckparse #define yylex sycklex #define yyerror syckerror #define yylval sycklval #define yychar syckchar #define yydebug syckdebug #define yynerrs sycknerrs /* Tokens. */ #ifndef YYTOKENTYPE # define YYTOKENTYPE /* Put the tokens into the symbol table, so that GDB and other debuggers know about them. */ enum yytokentype { YAML_ANCHOR = 258, YAML_ALIAS = 259, YAML_TRANSFER = 260, YAML_TAGURI = 261, YAML_ITRANSFER = 262, YAML_WORD = 263, YAML_PLAIN = 264, YAML_BLOCK = 265, YAML_DOCSEP = 266, YAML_IOPEN = 267, YAML_INDENT = 268, YAML_IEND = 269 }; #endif #define YAML_ANCHOR 258 #define YAML_ALIAS 259 #define YAML_TRANSFER 260 #define YAML_TAGURI 261 #define YAML_ITRANSFER 262 #define YAML_WORD 263 #define YAML_PLAIN 264 #define YAML_BLOCK 265 #define YAML_DOCSEP 266 #define YAML_IOPEN 267 #define YAML_INDENT 268 #define YAML_IEND 269 /* Copy the first part of user declarations. */ #line 14 "gram.y" #include "syck.h" void apply_seq_in_map( SyckParser *parser, SyckNode *n ); #define YYPARSE_PARAM parser #define YYLEX_PARAM parser #define NULL_NODE(parser, node) \ SyckNode *node = syck_new_str( "", scalar_plain ); \ if ( ((SyckParser *)parser)->taguri_expansion == 1 ) \ { \ node->type_id = syck_taguri( YAML_DOMAIN, "null", 4 ); \ } \ else \ { \ node->type_id = syck_strndup( "null", 4 ); \ } /* Enabling traces. */ #ifndef YYDEBUG # define YYDEBUG 1 #endif /* Enabling verbose error messages. */ #ifdef YYERROR_VERBOSE # undef YYERROR_VERBOSE # define YYERROR_VERBOSE 1 #else # define YYERROR_VERBOSE 0 #endif #if ! defined (YYSTYPE) && ! defined (YYSTYPE_IS_DECLARED) #line 35 "gram.y" typedef union YYSTYPE { SYMID nodeId; SyckNode *nodeData; char *name; } YYSTYPE; /* Line 191 of yacc.c. */ #line 140 "gram.c" # define yystype YYSTYPE /* obsolescent; will be withdrawn */ # define YYSTYPE_IS_DECLARED 1 # define YYSTYPE_IS_TRIVIAL 1 #endif /* Copy the second part of user declarations. */ /* Line 214 of yacc.c. */ #line 152 "gram.c" #if ! defined (yyoverflow) || YYERROR_VERBOSE # ifndef YYFREE # define YYFREE free # endif # ifndef YYMALLOC # define YYMALLOC malloc # endif /* The parser invokes alloca or malloc; define the necessary symbols. 
*/ # ifdef YYSTACK_USE_ALLOCA # if YYSTACK_USE_ALLOCA # define YYSTACK_ALLOC alloca # endif # else # if defined (alloca) || defined (_ALLOCA_H) # define YYSTACK_ALLOC alloca # else # ifdef __GNUC__ # define YYSTACK_ALLOC __builtin_alloca # endif # endif # endif # ifdef YYSTACK_ALLOC /* Pacify GCC's `empty if-body' warning. */ # define YYSTACK_FREE(Ptr) do { /* empty */; } while (0) # else # if defined (__STDC__) || defined (__cplusplus) # include <stdlib.h> /* INFRINGES ON USER NAME SPACE */ # define YYSIZE_T size_t # endif # define YYSTACK_ALLOC YYMALLOC # define YYSTACK_FREE YYFREE # endif #endif /* ! defined (yyoverflow) || YYERROR_VERBOSE */ #if (! defined (yyoverflow) \ && (! defined (__cplusplus) \ || (defined (YYSTYPE_IS_TRIVIAL) && YYSTYPE_IS_TRIVIAL))) /* A type that is properly aligned for any stack member. */ union yyalloc { short int yyss; YYSTYPE yyvs; }; /* The size of the maximum gap between one aligned stack and the next. */ # define YYSTACK_GAP_MAXIMUM (sizeof (union yyalloc) - 1) /* The size of an array large to enough to hold all stacks, each with N elements. */ # define YYSTACK_BYTES(N) \ ((N) * (sizeof (short int) + sizeof (YYSTYPE)) \ + YYSTACK_GAP_MAXIMUM) /* Copy COUNT objects from FROM to TO. The source and destination do not overlap. */ # ifndef YYCOPY # if defined (__GNUC__) && 1 < __GNUC__ # define YYCOPY(To, From, Count) \ __builtin_memcpy (To, From, (Count) * sizeof (*(From))) # else # define YYCOPY(To, From, Count) \ do \ { \ register YYSIZE_T yyi; \ for (yyi = 0; yyi < (Count); yyi++) \ (To)[yyi] = (From)[yyi]; \ } \ while (0) # endif # endif /* Relocate STACK from its old location to the new one. The local variables YYSIZE and YYSTACKSIZE give the old and new number of elements in the stack, and YYPTR gives the new location of the stack. Advance YYPTR to a properly aligned location for the next stack. */ # define YYSTACK_RELOCATE(Stack) \ do \ { \ YYSIZE_T yynewbytes; \ YYCOPY (&yyptr->Stack, Stack, yysize); \ Stack = &yyptr->Stack; \ yynewbytes = yystacksize * sizeof (*Stack) + YYSTACK_GAP_MAXIMUM; \ yyptr += yynewbytes / sizeof (*yyptr); \ } \ while (0) #endif #if defined (__STDC__) || defined (__cplusplus) typedef signed char yysigned_char; #else typedef short int yysigned_char; #endif /* YYFINAL -- State number of the termination state. */ #define YYFINAL 52 /* YYLAST -- Last index in YYTABLE. */ #define YYLAST 396 /* YYNTOKENS -- Number of terminals. */ #define YYNTOKENS 23 /* YYNNTS -- Number of nonterminals. */ #define YYNNTS 29 /* YYNRULES -- Number of rules. */ #define YYNRULES 79 /* YYNRULES -- Number of states. */ #define YYNSTATES 128 /* YYTRANSLATE(YYLEX) -- Bison symbol number corresponding to YYLEX. */ #define YYUNDEFTOK 2 #define YYMAXUTOK 269 #define YYTRANSLATE(YYX) \ ((unsigned int) (YYX) <= YYMAXUTOK ? yytranslate[YYX] : YYUNDEFTOK) /* YYTRANSLATE[YYLEX] -- Bison symbol number corresponding to YYLEX.
*/ static const unsigned char yytranslate[] = { 0, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 21, 15, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 16, 2, 2, 2, 2, 22, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 17, 2, 18, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 19, 2, 20, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14 }; #if YYDEBUG /* YYPRHS[YYN] -- Index of the first RHS symbol of rule number YYN in YYRHS. */ static const unsigned char yyprhs[] = { 0, 0, 3, 5, 8, 9, 11, 13, 15, 18, 21, 24, 28, 30, 32, 36, 37, 40, 43, 46, 49, 51, 54, 56, 58, 60, 63, 66, 69, 72, 75, 77, 79, 81, 85, 87, 89, 91, 93, 95, 99, 103, 106, 110, 113, 117, 120, 124, 127, 129, 133, 136, 140, 143, 145, 149, 151, 153, 157, 161, 165, 168, 172, 175, 179, 182, 184, 188, 190, 194, 196, 200, 204, 207, 211, 215, 218, 220, 224, 226 }; /* YYRHS -- A `-1'-separated list of the rules' RHS. */ static const yysigned_char yyrhs[] = { 24, 0, -1, 25, -1, 11, 27, -1, -1, 33, -1, 26, -1, 34, -1, 5, 26, -1, 6, 26, -1, 3, 26, -1, 29, 26, 32, -1, 25, -1, 28, -1, 29, 28, 30, -1, -1, 7, 28, -1, 5, 28, -1, 6, 28, -1, 3, 28, -1, 12, -1, 29, 13, -1, 14, -1, 13, -1, 14, -1, 31, 32, -1, 5, 33, -1, 6, 33, -1, 7, 33, -1, 3, 33, -1, 4, -1, 8, -1, 9, -1, 29, 33, 32, -1, 10, -1, 35, -1, 39, -1, 42, -1, 49, -1, 29, 37, 30, -1, 29, 38, 30, -1, 15, 27, -1, 5, 31, 38, -1, 5, 37, -1, 6, 31, 38, -1, 6, 37, -1, 3, 31, 38, -1, 3, 37, -1, 36, -1, 38, 31, 36, -1, 38, 31, -1, 17, 40, 18, -1, 17, 18, -1, 41, -1, 40, 21, 41, -1, 25, -1, 48, -1, 29, 43, 30, -1, 29, 47, 30, -1, 5, 31, 47, -1, 5, 43, -1, 6, 31, 47, -1, 6, 43, -1, 3, 31, 47, -1, 3, 43, -1, 33, -1, 22, 25, 31, -1, 27, -1, 44, 16, 45, -1, 46, -1, 47, 31, 36, -1, 47, 31, 46, -1, 47, 31, -1, 25, 16, 27, -1, 19, 50, 20, -1, 19, 20, -1, 51, -1, 50, 21, 51, -1, 25, -1, 48, -1 }; /* YYRLINE[YYN] -- source line where rule number YYN was defined. */ static const unsigned short int yyrline[] = { 0, 56, 56, 60, 65, 70, 71, 74, 75, 80, 85, 94, 100, 101, 104, 109, 113, 121, 126, 131, 145, 146, 149, 152, 155, 156, 164, 169, 174, 182, 186, 194, 207, 208, 218, 219, 220, 221, 222, 228, 232, 238, 244, 249, 254, 259, 264, 268, 274, 278, 283, 292, 296, 302, 306, 313, 314, 320, 325, 332, 337, 342, 347, 352, 356, 362, 363, 369, 379, 396, 397, 409, 417, 426, 434, 438, 444, 445, 454, 461 }; #endif #if YYDEBUG || YYERROR_VERBOSE /* YYTNME[SYMBOL-NUM] -- String name of the symbol SYMBOL-NUM. First, the terminals, then, starting at YYNTOKENS, nonterminals. 
*/ static const char *const yytname[] = { "$end", "error", "$undefined", "YAML_ANCHOR", "YAML_ALIAS", "YAML_TRANSFER", "YAML_TAGURI", "YAML_ITRANSFER", "YAML_WORD", "YAML_PLAIN", "YAML_BLOCK", "YAML_DOCSEP", "YAML_IOPEN", "YAML_INDENT", "YAML_IEND", "'-'", "':'", "'['", "']'", "'{'", "'}'", "','", "'?'", "$accept", "doc", "atom", "ind_rep", "atom_or_empty", "empty", "indent_open", "indent_end", "indent_sep", "indent_flex_end", "word_rep", "struct_rep", "implicit_seq", "basic_seq", "top_imp_seq", "in_implicit_seq", "inline_seq", "in_inline_seq", "inline_seq_atom", "implicit_map", "top_imp_map", "complex_key", "complex_value", "complex_mapping", "in_implicit_map", "basic_mapping", "inline_map", "in_inline_map", "inline_map_atom", 0 }; #endif # ifdef YYPRINT /* YYTOKNUM[YYLEX-NUM] -- Internal token number corresponding to token YYLEX-NUM. */ static const unsigned short int yytoknum[] = { 0, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 45, 58, 91, 93, 123, 125, 44, 63 }; # endif /* YYR1[YYN] -- Symbol number of symbol that rule YYN derives. */ static const unsigned char yyr1[] = { 0, 23, 24, 24, 24, 25, 25, 26, 26, 26, 26, 26, 27, 27, 28, 28, 28, 28, 28, 28, 29, 29, 30, 31, 32, 32, 33, 33, 33, 33, 33, 33, 33, 33, 34, 34, 34, 34, 34, 35, 35, 36, 37, 37, 37, 37, 37, 37, 38, 38, 38, 39, 39, 40, 40, 41, 41, 42, 42, 43, 43, 43, 43, 43, 43, 44, 44, 45, 46, 47, 47, 47, 47, 48, 49, 49, 50, 50, 51, 51 }; /* YYR2[YYN] -- Number of symbols composing right hand side of rule YYN. */ static const unsigned char yyr2[] = { 0, 2, 1, 2, 0, 1, 1, 1, 2, 2, 2, 3, 1, 1, 3, 0, 2, 2, 2, 2, 1, 2, 1, 1, 1, 2, 2, 2, 2, 2, 1, 1, 1, 3, 1, 1, 1, 1, 1, 3, 3, 2, 3, 2, 3, 2, 3, 2, 1, 3, 2, 3, 2, 1, 3, 1, 1, 3, 3, 3, 2, 3, 2, 3, 2, 1, 3, 1, 3, 1, 3, 3, 2, 3, 3, 2, 1, 3, 1, 1 }; /* YYDEFACT[STATE-NAME] -- Default rule to reduce with in state STATE-NUM when YYTABLE doesn't specify something else to do. Zero means the default is an error. */ static const unsigned char yydefact[] = { 4, 0, 30, 0, 0, 0, 31, 32, 34, 15, 20, 0, 0, 0, 2, 6, 0, 5, 7, 35, 36, 37, 38, 10, 29, 8, 26, 9, 27, 0, 0, 0, 0, 28, 15, 15, 15, 15, 12, 3, 13, 15, 52, 55, 0, 53, 56, 75, 78, 79, 0, 76, 1, 0, 0, 0, 21, 15, 0, 0, 65, 48, 0, 0, 0, 0, 69, 0, 0, 19, 17, 18, 15, 15, 15, 16, 15, 15, 15, 15, 0, 15, 51, 0, 74, 0, 23, 0, 47, 64, 0, 43, 60, 0, 45, 62, 41, 0, 24, 0, 11, 33, 22, 39, 40, 50, 57, 15, 58, 72, 14, 73, 54, 77, 65, 46, 63, 42, 59, 44, 61, 66, 25, 49, 67, 68, 70, 71 }; /* YYDEFGOTO[NTERM-NUM]. */ static const yysigned_char yydefgoto[] = { -1, 13, 38, 15, 39, 40, 16, 103, 99, 101, 17, 18, 19, 61, 62, 63, 20, 44, 45, 21, 64, 65, 125, 66, 67, 46, 22, 50, 51 }; /* YYPACT[STATE-NUM] -- Index in YYTABLE of the portion describing STATE-NUM. */ #define YYPACT_NINF -97 static const short int yypact[] = { 250, 318, -97, 318, 318, 374, -97, -97, -97, 335, -97, 267, 232, 7, -97, -97, 192, -97, -97, -97, -97, -97, -97, -97, -97, -97, -97, -97, -97, 374, 374, 374, 352, -97, 335, 335, 335, 384, -97, -97, -97, 212, -97, 10, 0, -97, -97, -97, 10, -97, -4, -97, -97, 284, 284, 284, -97, 335, 318, 30, 30, -97, -2, 36, -2, 16, -97, 36, 30, -97, -97, -97, 384, 384, 384, -97, 363, 301, 301, 301, -2, 335, -97, 318, -97, 318, -97, 158, -97, -97, 158, -97, -97, 158, -97, -97, -97, 24, -97, 30, -97, -97, -97, -97, -97, 26, -97, 335, -97, 158, -97, -97, -97, -97, -97, 24, 24, 24, 24, 24, 24, -97, -97, -97, -97, -97, -97, -97 }; /* YYPGOTO[NTERM-NUM]. 
*/ static const yysigned_char yypgoto[] = { -97, -97, 8, 81, -56, 109, 33, -53, 74, -54, -1, -97, -97, -96, -31, -32, -97, -97, -44, -97, 77, -97, -97, -52, 9, -6, -97, -97, -29 }; /* YYTABLE[YYPACT[STATE-NUM]]. What to do in state STATE-NUM. If positive, shift that token. If negative, reduce the rule which number is the opposite. If zero, do what YYDEFACT says. If YYTABLE_NINF, syntax error. */ #define YYTABLE_NINF -1 static const unsigned char yytable[] = { 24, 96, 26, 28, 33, 100, 49, 52, 14, 123, 104, 106, 102, 126, 108, 60, 84, 85, 82, 43, 48, 83, 88, 91, 94, 111, 81, 110, 24, 26, 28, 68, 107, 24, 26, 28, 33, 86, 32, 112, 60, 57, 41, 86, 98, 122, 88, 91, 94, 86, 102, 124, 24, 26, 28, 115, 113, 127, 117, 0, 0, 119, 32, 32, 32, 32, 97, 41, 41, 41, 76, 24, 26, 28, 41, 68, 24, 26, 28, 49, 0, 0, 23, 0, 25, 27, 114, 0, 0, 114, 41, 43, 114, 48, 0, 0, 116, 59, 0, 118, 0, 0, 120, 0, 0, 76, 76, 76, 114, 76, 41, 41, 41, 0, 41, 23, 25, 27, 0, 0, 32, 0, 59, 32, 0, 0, 32, 87, 90, 93, 89, 92, 95, 0, 23, 25, 27, 105, 0, 0, 41, 109, 32, 69, 70, 71, 75, 0, 0, 0, 80, 87, 90, 93, 89, 92, 95, 0, 23, 25, 27, 29, 2, 30, 31, 5, 6, 7, 0, 0, 10, 121, 0, 57, 0, 0, 0, 0, 0, 0, 58, 69, 70, 71, 0, 80, 69, 70, 71, 105, 109, 105, 109, 105, 109, 53, 2, 54, 55, 5, 6, 7, 8, 0, 10, 56, 0, 57, 0, 11, 0, 12, 0, 0, 58, 77, 2, 78, 79, 37, 6, 7, 8, 0, 10, 56, 0, 57, 0, 11, 0, 12, 0, 0, 58, 1, 2, 3, 4, 5, 6, 7, 8, 0, 10, 0, 0, 0, 0, 11, 0, 12, 47, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 0, 0, 0, 0, 11, 0, 12, 1, 2, 3, 4, 5, 6, 7, 8, 0, 10, 0, 0, 0, 0, 11, 42, 12, 53, 2, 54, 55, 5, 6, 7, 8, 0, 10, 86, 0, 0, 0, 11, 0, 12, 77, 2, 78, 79, 37, 6, 7, 8, 0, 10, 86, 0, 0, 0, 11, 0, 12, 1, 2, 3, 4, 5, 6, 7, 8, 0, 10, 0, 0, 0, 0, 11, 0, 12, 34, 2, 35, 36, 37, 6, 7, 8, 0, 10, 0, 0, 0, 0, 11, 0, 12, 29, 2, 30, 31, 5, 6, 7, 0, 0, 10, 56, 72, 2, 73, 74, 37, 6, 7, 0, 0, 10, 56, 29, 2, 30, 31, 5, 6, 7, 0, 0, 10, 72, 2, 73, 74, 37, 6, 7, 0, 0, 10 }; static const yysigned_char yycheck[] = { 1, 57, 3, 4, 5, 59, 12, 0, 0, 105, 63, 64, 14, 109, 67, 16, 20, 21, 18, 11, 12, 21, 53, 54, 55, 81, 16, 80, 29, 30, 31, 32, 16, 34, 35, 36, 37, 13, 5, 83, 41, 15, 9, 13, 14, 99, 77, 78, 79, 13, 14, 107, 53, 54, 55, 87, 85, 109, 90, -1, -1, 93, 29, 30, 31, 32, 58, 34, 35, 36, 37, 72, 73, 74, 41, 76, 77, 78, 79, 85, -1, -1, 1, -1, 3, 4, 87, -1, -1, 90, 57, 83, 93, 85, -1, -1, 87, 16, -1, 90, -1, -1, 93, -1, -1, 72, 73, 74, 109, 76, 77, 78, 79, -1, 81, 34, 35, 36, -1, -1, 87, -1, 41, 90, -1, -1, 93, 53, 54, 55, 53, 54, 55, -1, 53, 54, 55, 63, -1, -1, 107, 67, 109, 34, 35, 36, 37, -1, -1, -1, 41, 77, 78, 79, 77, 78, 79, -1, 77, 78, 79, 3, 4, 5, 6, 7, 8, 9, -1, -1, 12, 97, -1, 15, -1, -1, -1, -1, -1, -1, 22, 72, 73, 74, -1, 76, 77, 78, 79, 115, 116, 117, 118, 119, 120, 3, 4, 5, 6, 7, 8, 9, 10, -1, 12, 13, -1, 15, -1, 17, -1, 19, -1, -1, 22, 3, 4, 5, 6, 7, 8, 9, 10, -1, 12, 13, -1, 15, -1, 17, -1, 19, -1, -1, 22, 3, 4, 5, 6, 7, 8, 9, 10, -1, 12, -1, -1, -1, -1, 17, -1, 19, 20, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, -1, -1, -1, -1, 17, -1, 19, 3, 4, 5, 6, 7, 8, 9, 10, -1, 12, -1, -1, -1, -1, 17, 18, 19, 3, 4, 5, 6, 7, 8, 9, 10, -1, 12, 13, -1, -1, -1, 17, -1, 19, 3, 4, 5, 6, 7, 8, 9, 10, -1, 12, 13, -1, -1, -1, 17, -1, 19, 3, 4, 5, 6, 7, 8, 9, 10, -1, 12, -1, -1, -1, -1, 17, -1, 19, 3, 4, 5, 6, 7, 8, 9, 10, -1, 12, -1, -1, -1, -1, 17, -1, 19, 3, 4, 5, 6, 7, 8, 9, -1, -1, 12, 13, 3, 4, 5, 6, 7, 8, 9, -1, -1, 12, 13, 3, 4, 5, 6, 7, 8, 9, -1, -1, 12, 3, 4, 5, 6, 7, 8, 9, -1, -1, 12 }; /* YYSTOS[STATE-NUM] -- The (internal number of the) accessing symbol of state STATE-NUM. 
*/ static const unsigned char yystos[] = { 0, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 17, 19, 24, 25, 26, 29, 33, 34, 35, 39, 42, 49, 26, 33, 26, 33, 26, 33, 3, 5, 6, 29, 33, 3, 5, 6, 7, 25, 27, 28, 29, 18, 25, 40, 41, 48, 20, 25, 48, 50, 51, 0, 3, 5, 6, 13, 15, 22, 26, 33, 36, 37, 38, 43, 44, 46, 47, 33, 28, 28, 28, 3, 5, 6, 28, 29, 3, 5, 6, 28, 16, 18, 21, 20, 21, 13, 31, 37, 43, 31, 37, 43, 31, 37, 43, 27, 25, 14, 31, 32, 32, 14, 30, 30, 31, 30, 16, 30, 31, 30, 27, 41, 51, 33, 38, 47, 38, 47, 38, 47, 31, 32, 36, 27, 45, 36, 46 }; #if ! defined (YYSIZE_T) && defined (__SIZE_TYPE__) # define YYSIZE_T __SIZE_TYPE__ #endif #if ! defined (YYSIZE_T) && defined (size_t) # define YYSIZE_T size_t #endif #if ! defined (YYSIZE_T) # if defined (__STDC__) || defined (__cplusplus) # include <stddef.h> /* INFRINGES ON USER NAME SPACE */ # define YYSIZE_T size_t # endif #endif #if ! defined (YYSIZE_T) # define YYSIZE_T unsigned int #endif #define yyerrok (yyerrstatus = 0) #define yyclearin (yychar = YYEMPTY) #define YYEMPTY (-2) #define YYEOF 0 #define YYACCEPT goto yyacceptlab #define YYABORT goto yyabortlab #define YYERROR goto yyerrorlab /* Like YYERROR except do call yyerror. This remains here temporarily to ease the transition to the new meaning of YYERROR, for GCC. Once GCC version 2 has supplanted version 1, this can go. */ #define YYFAIL goto yyerrlab #define YYRECOVERING() (!!yyerrstatus) #define YYBACKUP(Token, Value) \ do \ if (yychar == YYEMPTY && yylen == 1) \ { \ yychar = (Token); \ yylval = (Value); \ yytoken = YYTRANSLATE (yychar); \ YYPOPSTACK; \ goto yybackup; \ } \ else \ { \ yyerror ("syntax error: cannot back up");\ YYERROR; \ } \ while (0) #define YYTERROR 1 #define YYERRCODE 256 /* YYLLOC_DEFAULT -- Compute the default location (before the actions are run). */ #ifndef YYLLOC_DEFAULT # define YYLLOC_DEFAULT(Current, Rhs, N) \ ((Current).first_line = (Rhs)[1].first_line, \ (Current).first_column = (Rhs)[1].first_column, \ (Current).last_line = (Rhs)[N].last_line, \ (Current).last_column = (Rhs)[N].last_column) #endif /* YYLEX -- calling `yylex' with the right arguments. */ #ifdef YYLEX_PARAM # define YYLEX yylex (&yylval, YYLEX_PARAM) #else # define YYLEX yylex (&yylval) #endif /* Enable debugging if requested. */ #if YYDEBUG # ifndef YYFPRINTF # include <stdio.h> /* INFRINGES ON USER NAME SPACE */ # define YYFPRINTF fprintf # endif # define YYDPRINTF(Args) \ do { \ if (yydebug) \ YYFPRINTF Args; \ } while (0) # define YYDSYMPRINT(Args) \ do { \ if (yydebug) \ yysymprint Args; \ } while (0) # define YYDSYMPRINTF(Title, Token, Value, Location) \ do { \ if (yydebug) \ { \ YYFPRINTF (stderr, "%s ", Title); \ yysymprint (stderr, \ Token, Value); \ YYFPRINTF (stderr, "\n"); \ } \ } while (0) /*------------------------------------------------------------------. | yy_stack_print -- Print the state stack from its BOTTOM up to its | | TOP (included). | `------------------------------------------------------------------*/ #if defined (__STDC__) || defined (__cplusplus) static void yy_stack_print (short int *bottom, short int *top) #else static void yy_stack_print (bottom, top) short int *bottom; short int *top; #endif { YYFPRINTF (stderr, "Stack now"); for (/* Nothing. */; bottom <= top; ++bottom) YYFPRINTF (stderr, " %d", *bottom); YYFPRINTF (stderr, "\n"); } # define YY_STACK_PRINT(Bottom, Top) \ do { \ if (yydebug) \ yy_stack_print ((Bottom), (Top)); \ } while (0) /*------------------------------------------------. | Report that the YYRULE is going to be reduced.
| `------------------------------------------------*/ #if defined (__STDC__) || defined (__cplusplus) static void yy_reduce_print (int yyrule) #else static void yy_reduce_print (yyrule) int yyrule; #endif { int yyi; unsigned int yylno = yyrline[yyrule]; YYFPRINTF (stderr, "Reducing stack by rule %d (line %u), ", yyrule - 1, yylno); /* Print the symbols being reduced, and their result. */ for (yyi = yyprhs[yyrule]; 0 <= yyrhs[yyi]; yyi++) YYFPRINTF (stderr, "%s ", yytname [yyrhs[yyi]]); YYFPRINTF (stderr, "-> %s\n", yytname [yyr1[yyrule]]); } # define YY_REDUCE_PRINT(Rule) \ do { \ if (yydebug) \ yy_reduce_print (Rule); \ } while (0) /* Nonzero means print parse trace. It is left uninitialized so that multiple parsers can coexist. */ int yydebug; #else /* !YYDEBUG */ # define YYDPRINTF(Args) # define YYDSYMPRINT(Args) # define YYDSYMPRINTF(Title, Token, Value, Location) # define YY_STACK_PRINT(Bottom, Top) # define YY_REDUCE_PRINT(Rule) #endif /* !YYDEBUG */ /* YYINITDEPTH -- initial size of the parser's stacks. */ #ifndef YYINITDEPTH # define YYINITDEPTH 200 #endif /* YYMAXDEPTH -- maximum size the stacks can grow to (effective only if the built-in stack extension method is used). Do not make this value too large; the results are undefined if SIZE_MAX < YYSTACK_BYTES (YYMAXDEPTH) evaluated with infinite-precision integer arithmetic. */ #if defined (YYMAXDEPTH) && YYMAXDEPTH == 0 # undef YYMAXDEPTH #endif #ifndef YYMAXDEPTH # define YYMAXDEPTH 10000 #endif #if YYERROR_VERBOSE # ifndef yystrlen # if defined (__GLIBC__) && defined (_STRING_H) # define yystrlen strlen # else /* Return the length of YYSTR. */ static YYSIZE_T # if defined (__STDC__) || defined (__cplusplus) yystrlen (const char *yystr) # else yystrlen (yystr) const char *yystr; # endif { register const char *yys = yystr; while (*yys++ != '\0') continue; return yys - yystr - 1; } # endif # endif # ifndef yystpcpy # if defined (__GLIBC__) && defined (_STRING_H) && defined (_GNU_SOURCE) # define yystpcpy stpcpy # else /* Copy YYSRC to YYDEST, returning the address of the terminating '\0' in YYDEST. */ static char * # if defined (__STDC__) || defined (__cplusplus) yystpcpy (char *yydest, const char *yysrc) # else yystpcpy (yydest, yysrc) char *yydest; const char *yysrc; # endif { register char *yyd = yydest; register const char *yys = yysrc; while ((*yyd++ = *yys++) != '\0') continue; return yyd - 1; } # endif # endif #endif /* !YYERROR_VERBOSE */ #if YYDEBUG /*--------------------------------. | Print this symbol on YYOUTPUT. | `--------------------------------*/ #if defined (__STDC__) || defined (__cplusplus) static void yysymprint (FILE *yyoutput, int yytype, YYSTYPE *yyvaluep) #else static void yysymprint (yyoutput, yytype, yyvaluep) FILE *yyoutput; int yytype; YYSTYPE *yyvaluep; #endif { /* Pacify ``unused variable'' warnings. */ (void) yyvaluep; if (yytype < YYNTOKENS) { YYFPRINTF (yyoutput, "token %s (", yytname[yytype]); # ifdef YYPRINT YYPRINT (yyoutput, yytoknum[yytype], *yyvaluep); # endif } else YYFPRINTF (yyoutput, "nterm %s (", yytname[yytype]); switch (yytype) { default: break; } YYFPRINTF (yyoutput, ")"); } #endif /* ! YYDEBUG */ /*-----------------------------------------------. | Release the memory associated to this symbol. | `-----------------------------------------------*/ #if defined (__STDC__) || defined (__cplusplus) static void yydestruct (int yytype, YYSTYPE *yyvaluep) #else static void yydestruct (yytype, yyvaluep) int yytype; YYSTYPE *yyvaluep; #endif { /* Pacify ``unused variable'' warnings. 
*/ (void) yyvaluep; switch (yytype) { default: break; } } /* Prevent warnings from -Wmissing-prototypes. */ #ifdef YYPARSE_PARAM # if defined (__STDC__) || defined (__cplusplus) int yyparse (void *YYPARSE_PARAM); # else int yyparse (); # endif #else /* ! YYPARSE_PARAM */ #if defined (__STDC__) || defined (__cplusplus) int yyparse (void); #else int yyparse (); #endif #endif /* ! YYPARSE_PARAM */ /*----------. | yyparse. | `----------*/ #ifdef YYPARSE_PARAM # if defined (__STDC__) || defined (__cplusplus) int yyparse (void *YYPARSE_PARAM) # else int yyparse (YYPARSE_PARAM) void *YYPARSE_PARAM; # endif #else /* ! YYPARSE_PARAM */ #if defined (__STDC__) || defined (__cplusplus) int yyparse (void) #else int yyparse () #endif #endif { /* The lookahead symbol. */ int yychar; /* The semantic value of the lookahead symbol. */ YYSTYPE yylval; /* Number of syntax errors so far. */ int yynerrs; register int yystate; register int yyn; int yyresult; /* Number of tokens to shift before error messages enabled. */ int yyerrstatus; /* Lookahead token as an internal (translated) token number. */ int yytoken = 0; /* Three stacks and their tools: `yyss': related to states, `yyvs': related to semantic values, `yyls': related to locations. Refer to the stacks thru separate pointers, to allow yyoverflow to reallocate them elsewhere. */ /* The state stack. */ short int yyssa[YYINITDEPTH]; short int *yyss = yyssa; register short int *yyssp; /* The semantic value stack. */ YYSTYPE yyvsa[YYINITDEPTH]; YYSTYPE *yyvs = yyvsa; register YYSTYPE *yyvsp; #define YYPOPSTACK (yyvsp--, yyssp--) YYSIZE_T yystacksize = YYINITDEPTH; /* The variables used to return semantic value and location from the action routines. */ YYSTYPE yyval; /* When reducing, the number of symbols on the RHS of the reduced rule. */ int yylen; YYDPRINTF ((stderr, "Starting parse\n")); yystate = 0; yyerrstatus = 0; yynerrs = 0; yychar = YYEMPTY; /* Cause a token to be read. */ /* Initialize stack pointers. Waste one element of value and location stack so that they stay on the same level as the state stack. The wasted elements are never initialized. */ yyssp = yyss; yyvsp = yyvs; goto yysetstate; /*------------------------------------------------------------. | yynewstate -- Push a new state, which is found in yystate. | `------------------------------------------------------------*/ yynewstate: /* In all cases, when you get here, the value and location stacks have just been pushed. so pushing a state here evens the stacks. */ yyssp++; yysetstate: *yyssp = yystate; if (yyss + yystacksize - 1 <= yyssp) { /* Get the current used size of the three stacks, in elements. */ YYSIZE_T yysize = yyssp - yyss + 1; #ifdef yyoverflow { /* Give user a chance to reallocate the stack. Use copies of these so that the &'s don't force the real ones into memory. */ YYSTYPE *yyvs1 = yyvs; short int *yyss1 = yyss; /* Each stack pointer address is followed by the size of the data in use in that stack, in bytes. This used to be a conditional around just the two extra args, but that might be undefined if yyoverflow is a macro. */ yyoverflow ("parser stack overflow", &yyss1, yysize * sizeof (*yyssp), &yyvs1, yysize * sizeof (*yyvsp), &yystacksize); yyss = yyss1; yyvs = yyvs1; } #else /* no yyoverflow */ # ifndef YYSTACK_RELOCATE goto yyoverflowlab; # else /* Extend the stack our own way. 
*/ if (YYMAXDEPTH <= yystacksize) goto yyoverflowlab; yystacksize *= 2; if (YYMAXDEPTH < yystacksize) yystacksize = YYMAXDEPTH; { short int *yyss1 = yyss; union yyalloc *yyptr = (union yyalloc *) YYSTACK_ALLOC (YYSTACK_BYTES (yystacksize)); if (! yyptr) goto yyoverflowlab; YYSTACK_RELOCATE (yyss); YYSTACK_RELOCATE (yyvs); # undef YYSTACK_RELOCATE if (yyss1 != yyssa) YYSTACK_FREE (yyss1); } # endif #endif /* no yyoverflow */ yyssp = yyss + yysize - 1; yyvsp = yyvs + yysize - 1; YYDPRINTF ((stderr, "Stack size increased to %lu\n", (unsigned long int) yystacksize)); if (yyss + yystacksize - 1 <= yyssp) YYABORT; } YYDPRINTF ((stderr, "Entering state %d\n", yystate)); goto yybackup; /*-----------. | yybackup. | `-----------*/ yybackup: /* Do appropriate processing given the current state. */ /* Read a lookahead token if we need one and don't already have one. */ /* yyresume: */ /* First try to decide what to do without reference to lookahead token. */ yyn = yypact[yystate]; if (yyn == YYPACT_NINF) goto yydefault; /* Not known => get a lookahead token if don't already have one. */ /* YYCHAR is either YYEMPTY or YYEOF or a valid lookahead symbol. */ if (yychar == YYEMPTY) { YYDPRINTF ((stderr, "Reading a token: ")); yychar = YYLEX; } if (yychar <= YYEOF) { yychar = yytoken = YYEOF; YYDPRINTF ((stderr, "Now at end of input.\n")); } else { yytoken = YYTRANSLATE (yychar); YYDSYMPRINTF ("Next token is", yytoken, &yylval, &yylloc); } /* If the proper action on seeing token YYTOKEN is to reduce or to detect an error, take that action. */ yyn += yytoken; if (yyn < 0 || YYLAST < yyn || yycheck[yyn] != yytoken) goto yydefault; yyn = yytable[yyn]; if (yyn <= 0) { if (yyn == 0 || yyn == YYTABLE_NINF) goto yyerrlab; yyn = -yyn; goto yyreduce; } if (yyn == YYFINAL) YYACCEPT; /* Shift the lookahead token. */ YYDPRINTF ((stderr, "Shifting token %s, ", yytname[yytoken])); /* Discard the token being shifted unless it is eof. */ if (yychar != YYEOF) yychar = YYEMPTY; *++yyvsp = yylval; /* Count tokens shifted since error; after three, turn off error status. */ if (yyerrstatus) yyerrstatus--; yystate = yyn; goto yynewstate; /*-----------------------------------------------------------. | yydefault -- do the default action for the current state. | `-----------------------------------------------------------*/ yydefault: yyn = yydefact[yystate]; if (yyn == 0) goto yyerrlab; goto yyreduce; /*-----------------------------. | yyreduce -- Do a reduction. | `-----------------------------*/ yyreduce: /* yyn is the number of a rule to reduce with. */ yylen = yyr2[yyn]; /* If YYLEN is nonzero, implement the default value of the action: `$$ = $1'. Otherwise, the following line sets YYVAL to garbage. This behavior is undocumented and Bison users should not rely upon it. Assigning to YYVAL unconditionally makes the parser a bit smaller, and it avoids a GCC warning that YYVAL may be used uninitialized. 
*/ yyval = yyvsp[1-yylen]; YY_REDUCE_PRINT (yyn); switch (yyn) { case 2: #line 57 "gram.y" { ((SyckParser *)parser)->root = syck_hdlr_add_node( (SyckParser *)parser, yyvsp[0].nodeData ); } break; case 3: #line 61 "gram.y" { ((SyckParser *)parser)->root = syck_hdlr_add_node( (SyckParser *)parser, yyvsp[0].nodeData ); } break; case 4: #line 65 "gram.y" { ((SyckParser *)parser)->eof = 1; } break; case 8: #line 76 "gram.y" { syck_add_transfer( yyvsp[-1].name, yyvsp[0].nodeData, ((SyckParser *)parser)->taguri_expansion ); yyval.nodeData = yyvsp[0].nodeData; } break; case 9: #line 81 "gram.y" { syck_add_transfer( yyvsp[-1].name, yyvsp[0].nodeData, 0 ); yyval.nodeData = yyvsp[0].nodeData; } break; case 10: #line 86 "gram.y" { /* * _Anchors_: The language binding must keep a separate symbol table * for anchors. The actual ID in the symbol table is returned to the * higher nodes, though. */ yyval.nodeData = syck_hdlr_add_anchor( (SyckParser *)parser, yyvsp[-1].name, yyvsp[0].nodeData ); } break; case 11: #line 95 "gram.y" { yyval.nodeData = yyvsp[-1].nodeData; } break; case 14: #line 105 "gram.y" { yyval.nodeData = yyvsp[-1].nodeData; } break; case 15: #line 109 "gram.y" { NULL_NODE( parser, n ); yyval.nodeData = n; } break; case 16: #line 114 "gram.y" { if ( ((SyckParser *)parser)->implicit_typing == 1 ) { try_tag_implicit( yyvsp[0].nodeData, ((SyckParser *)parser)->taguri_expansion ); } yyval.nodeData = yyvsp[0].nodeData; } break; case 17: #line 122 "gram.y" { syck_add_transfer( yyvsp[-1].name, yyvsp[0].nodeData, ((SyckParser *)parser)->taguri_expansion ); yyval.nodeData = yyvsp[0].nodeData; } break; case 18: #line 127 "gram.y" { syck_add_transfer( yyvsp[-1].name, yyvsp[0].nodeData, 0 ); yyval.nodeData = yyvsp[0].nodeData; } break; case 19: #line 132 "gram.y" { /* * _Anchors_: The language binding must keep a separate symbol table * for anchors. The actual ID in the symbol table is returned to the * higher nodes, though. */ yyval.nodeData = syck_hdlr_add_anchor( (SyckParser *)parser, yyvsp[-1].name, yyvsp[0].nodeData ); } break; case 26: #line 165 "gram.y" { syck_add_transfer( yyvsp[-1].name, yyvsp[0].nodeData, ((SyckParser *)parser)->taguri_expansion ); yyval.nodeData = yyvsp[0].nodeData; } break; case 27: #line 170 "gram.y" { syck_add_transfer( yyvsp[-1].name, yyvsp[0].nodeData, 0 ); yyval.nodeData = yyvsp[0].nodeData; } break; case 28: #line 175 "gram.y" { if ( ((SyckParser *)parser)->implicit_typing == 1 ) { try_tag_implicit( yyvsp[0].nodeData, ((SyckParser *)parser)->taguri_expansion ); } yyval.nodeData = yyvsp[0].nodeData; } break; case 29: #line 183 "gram.y" { yyval.nodeData = syck_hdlr_add_anchor( (SyckParser *)parser, yyvsp[-1].name, yyvsp[0].nodeData ); } break; case 30: #line 187 "gram.y" { /* * _Aliases_: The anchor symbol table is scanned for the anchor name. * The anchor's ID in the language's symbol table is returned. 
*/ yyval.nodeData = syck_hdlr_get_anchor( (SyckParser *)parser, yyvsp[0].name ); } break; case 31: #line 195 "gram.y" { SyckNode *n = yyvsp[0].nodeData; if ( ((SyckParser *)parser)->taguri_expansion == 1 ) { n->type_id = syck_taguri( YAML_DOMAIN, "str", 3 ); } else { n->type_id = syck_strndup( "str", 3 ); } yyval.nodeData = n; } break; case 33: #line 209 "gram.y" { yyval.nodeData = yyvsp[-1].nodeData; } break; case 39: #line 229 "gram.y" { yyval.nodeData = yyvsp[-1].nodeData; } break; case 40: #line 233 "gram.y" { yyval.nodeData = yyvsp[-1].nodeData; } break; case 41: #line 239 "gram.y" { yyval.nodeId = syck_hdlr_add_node( (SyckParser *)parser, yyvsp[0].nodeData ); } break; case 42: #line 245 "gram.y" { syck_add_transfer( yyvsp[-2].name, yyvsp[0].nodeData, ((SyckParser *)parser)->taguri_expansion ); yyval.nodeData = yyvsp[0].nodeData; } break; case 43: #line 250 "gram.y" { syck_add_transfer( yyvsp[-1].name, yyvsp[0].nodeData, ((SyckParser *)parser)->taguri_expansion ); yyval.nodeData = yyvsp[0].nodeData; } break; case 44: #line 255 "gram.y" { syck_add_transfer( yyvsp[-2].name, yyvsp[0].nodeData, 0 ); yyval.nodeData = yyvsp[0].nodeData; } break; case 45: #line 260 "gram.y" { syck_add_transfer( yyvsp[-1].name, yyvsp[0].nodeData, 0 ); yyval.nodeData = yyvsp[0].nodeData; } break; case 46: #line 265 "gram.y" { yyval.nodeData = syck_hdlr_add_anchor( (SyckParser *)parser, yyvsp[-2].name, yyvsp[0].nodeData ); } break; case 47: #line 269 "gram.y" { yyval.nodeData = syck_hdlr_add_anchor( (SyckParser *)parser, yyvsp[-1].name, yyvsp[0].nodeData ); } break; case 48: #line 275 "gram.y" { yyval.nodeData = syck_new_seq( yyvsp[0].nodeId ); } break; case 49: #line 279 "gram.y" { syck_seq_add( yyvsp[-2].nodeData, yyvsp[0].nodeId ); yyval.nodeData = yyvsp[-2].nodeData; } break; case 50: #line 284 "gram.y" { yyval.nodeData = yyvsp[-1].nodeData; } break; case 51: #line 293 "gram.y" { yyval.nodeData = yyvsp[-1].nodeData; } break; case 52: #line 297 "gram.y" { yyval.nodeData = syck_alloc_seq(); } break; case 53: #line 303 "gram.y" { yyval.nodeData = syck_new_seq( syck_hdlr_add_node( (SyckParser *)parser, yyvsp[0].nodeData ) ); } break; case 54: #line 307 "gram.y" { syck_seq_add( yyvsp[-2].nodeData, syck_hdlr_add_node( (SyckParser *)parser, yyvsp[0].nodeData ) ); yyval.nodeData = yyvsp[-2].nodeData; } break; case 57: #line 321 "gram.y" { apply_seq_in_map( (SyckParser *)parser, yyvsp[-1].nodeData ); yyval.nodeData = yyvsp[-1].nodeData; } break; case 58: #line 326 "gram.y" { apply_seq_in_map( (SyckParser *)parser, yyvsp[-1].nodeData ); yyval.nodeData = yyvsp[-1].nodeData; } break; case 59: #line 333 "gram.y" { syck_add_transfer( yyvsp[-2].name, yyvsp[0].nodeData, ((SyckParser *)parser)->taguri_expansion ); yyval.nodeData = yyvsp[0].nodeData; } break; case 60: #line 338 "gram.y" { syck_add_transfer( yyvsp[-1].name, yyvsp[0].nodeData, ((SyckParser *)parser)->taguri_expansion ); yyval.nodeData = yyvsp[0].nodeData; } break; case 61: #line 343 "gram.y" { syck_add_transfer( yyvsp[-2].name, yyvsp[0].nodeData, 0 ); yyval.nodeData = yyvsp[0].nodeData; } break; case 62: #line 348 "gram.y" { syck_add_transfer( yyvsp[-1].name, yyvsp[0].nodeData, 0 ); yyval.nodeData = yyvsp[0].nodeData; } break; case 63: #line 353 "gram.y" { yyval.nodeData = syck_hdlr_add_anchor( (SyckParser *)parser, yyvsp[-2].name, yyvsp[0].nodeData ); } break; case 64: #line 357 "gram.y" { yyval.nodeData = syck_hdlr_add_anchor( (SyckParser *)parser, yyvsp[-1].name, yyvsp[0].nodeData ); } break; case 66: #line 364 "gram.y" { yyval.nodeData = 
yyvsp[-1].nodeData; } break; case 68: #line 380 "gram.y" { yyval.nodeData = syck_new_map( syck_hdlr_add_node( (SyckParser *)parser, yyvsp[-2].nodeData ), syck_hdlr_add_node( (SyckParser *)parser, yyvsp[0].nodeData ) ); } break; case 70: #line 398 "gram.y" { if ( yyvsp[-2].nodeData->shortcut == NULL ) { yyvsp[-2].nodeData->shortcut = syck_new_seq( yyvsp[0].nodeId ); } else { syck_seq_add( yyvsp[-2].nodeData->shortcut, yyvsp[0].nodeId ); } yyval.nodeData = yyvsp[-2].nodeData; } break; case 71: #line 410 "gram.y" { apply_seq_in_map( (SyckParser *)parser, yyvsp[-2].nodeData ); syck_map_update( yyvsp[-2].nodeData, yyvsp[0].nodeData ); syck_free_node( yyvsp[0].nodeData ); yyvsp[0].nodeData = NULL; yyval.nodeData = yyvsp[-2].nodeData; } break; case 72: #line 418 "gram.y" { yyval.nodeData = yyvsp[-1].nodeData; } break; case 73: #line 427 "gram.y" { yyval.nodeData = syck_new_map( syck_hdlr_add_node( (SyckParser *)parser, yyvsp[-2].nodeData ), syck_hdlr_add_node( (SyckParser *)parser, yyvsp[0].nodeData ) ); } break; case 74: #line 435 "gram.y" { yyval.nodeData = yyvsp[-1].nodeData; } break; case 75: #line 439 "gram.y" { yyval.nodeData = syck_alloc_map(); } break; case 77: #line 446 "gram.y" { syck_map_update( yyvsp[-2].nodeData, yyvsp[0].nodeData ); syck_free_node( yyvsp[0].nodeData ); yyvsp[0].nodeData = NULL; yyval.nodeData = yyvsp[-2].nodeData; } break; case 78: #line 455 "gram.y" { NULL_NODE( parser, n ); yyval.nodeData = syck_new_map( syck_hdlr_add_node( (SyckParser *)parser, yyvsp[0].nodeData ), syck_hdlr_add_node( (SyckParser *)parser, n ) ); } break; } /* Line 1010 of yacc.c. */ #line 1651 "gram.c" yyvsp -= yylen; yyssp -= yylen; YY_STACK_PRINT (yyss, yyssp); *++yyvsp = yyval; /* Now `shift' the result of the reduction. Determine what state that goes to, based on the state we popped back to and the rule number reduced by. */ yyn = yyr1[yyn]; yystate = yypgoto[yyn - YYNTOKENS] + *yyssp; if (0 <= yystate && yystate <= YYLAST && yycheck[yystate] == *yyssp) yystate = yytable[yystate]; else yystate = yydefgoto[yyn - YYNTOKENS]; goto yynewstate; /*------------------------------------. | yyerrlab -- here on detecting error | `------------------------------------*/ yyerrlab: /* If not already recovering from an error, report this error. */ if (!yyerrstatus) { ++yynerrs; #if YYERROR_VERBOSE yyn = yypact[yystate]; if (YYPACT_NINF < yyn && yyn < YYLAST) { YYSIZE_T yysize = 0; int yytype = YYTRANSLATE (yychar); const char* yyprefix; char *yymsg; int yyx; /* Start YYX at -YYN if negative to avoid negative indexes in YYCHECK. */ int yyxbegin = yyn < 0 ? -yyn : 0; /* Stay within bounds of both yycheck and yytname. */ int yychecklim = YYLAST - yyn; int yyxend = yychecklim < YYNTOKENS ? 
yychecklim : YYNTOKENS; int yycount = 0; yyprefix = ", expecting "; for (yyx = yyxbegin; yyx < yyxend; ++yyx) if (yycheck[yyx + yyn] == yyx && yyx != YYTERROR) { yysize += yystrlen (yyprefix) + yystrlen (yytname [yyx]); yycount += 1; if (yycount == 5) { yysize = 0; break; } } yysize += (sizeof ("syntax error, unexpected ") + yystrlen (yytname[yytype])); yymsg = (char *) YYSTACK_ALLOC (yysize); if (yymsg != 0) { char *yyp = yystpcpy (yymsg, "syntax error, unexpected "); yyp = yystpcpy (yyp, yytname[yytype]); if (yycount < 5) { yyprefix = ", expecting "; for (yyx = yyxbegin; yyx < yyxend; ++yyx) if (yycheck[yyx + yyn] == yyx && yyx != YYTERROR) { yyp = yystpcpy (yyp, yyprefix); yyp = yystpcpy (yyp, yytname[yyx]); yyprefix = " or "; } } yyerror (yymsg); YYSTACK_FREE (yymsg); } else yyerror ("syntax error; also virtual memory exhausted"); } else #endif /* YYERROR_VERBOSE */ yyerror ("syntax error"); } if (yyerrstatus == 3) { /* If just tried and failed to reuse lookahead token after an error, discard it. */ if (yychar <= YYEOF) { /* If at end of input, pop the error token, then the rest of the stack, then return failure. */ if (yychar == YYEOF) for (;;) { YYPOPSTACK; if (yyssp == yyss) YYABORT; YYDSYMPRINTF ("Error: popping", yystos[*yyssp], yyvsp, yylsp); yydestruct (yystos[*yyssp], yyvsp); } } else { YYDSYMPRINTF ("Error: discarding", yytoken, &yylval, &yylloc); yydestruct (yytoken, &yylval); yychar = YYEMPTY; } } /* Else will try to reuse lookahead token after shifting the error token. */ goto yyerrlab1; /*---------------------------------------------------. | yyerrorlab -- error raised explicitly by YYERROR. | `---------------------------------------------------*/ yyerrorlab: #ifdef __GNUC__ /* Pacify GCC when the user code never invokes YYERROR and the label yyerrorlab therefore never appears in user code. */ if (0) goto yyerrorlab; #endif yyvsp -= yylen; yyssp -= yylen; yystate = *yyssp; goto yyerrlab1; /*-------------------------------------------------------------. | yyerrlab1 -- common code for both syntax error and YYERROR. | `-------------------------------------------------------------*/ yyerrlab1: yyerrstatus = 3; /* Each real token shifted decrements this. */ for (;;) { yyn = yypact[yystate]; if (yyn != YYPACT_NINF) { yyn += YYTERROR; if (0 <= yyn && yyn <= YYLAST && yycheck[yyn] == YYTERROR) { yyn = yytable[yyn]; if (0 < yyn) break; } } /* Pop the current state because it cannot handle the error token. */ if (yyssp == yyss) YYABORT; YYDSYMPRINTF ("Error: popping", yystos[*yyssp], yyvsp, yylsp); yydestruct (yystos[yystate], yyvsp); YYPOPSTACK; yystate = *yyssp; YY_STACK_PRINT (yyss, yyssp); } if (yyn == YYFINAL) YYACCEPT; YYDPRINTF ((stderr, "Shifting error token, ")); *++yyvsp = yylval; yystate = yyn; goto yynewstate; /*-------------------------------------. | yyacceptlab -- YYACCEPT comes here. | `-------------------------------------*/ yyacceptlab: yyresult = 0; goto yyreturn; /*-----------------------------------. | yyabortlab -- YYABORT comes here. | `-----------------------------------*/ yyabortlab: yyresult = 1; goto yyreturn; #ifndef yyoverflow /*----------------------------------------------. | yyoverflowlab -- parser overflow comes here. | `----------------------------------------------*/ yyoverflowlab: yyerror ("parser stack overflow"); yyresult = 2; /* Fall through. 
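   After reporting the overflow above, control falls through to yyreturn
   below, which frees the heap-allocated parser stack (when yyoverflow is
   not in use and the stack was moved off yyssa) and returns yyresult:
   0 from yyacceptlab, 1 from yyabortlab, or 2 for this overflow path.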
*/ #endif yyreturn: #ifndef yyoverflow if (yyss != yyssa) YYSTACK_FREE (yyss); #endif return yyresult; } #line 464 "gram.y" void apply_seq_in_map( SyckParser *parser, SyckNode *n ) { long map_len; if ( n->shortcut == NULL ) { return; } map_len = syck_map_count( n ); syck_map_assign( n, map_value, map_len - 1, syck_hdlr_add_node( parser, n->shortcut ) ); n->shortcut = NULL; } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/lib/node.c0000644000000000000000000001673511672453175021735 0ustar rootroot/* * node.c * * $Author: why $ * $Date: 2005/04/13 06:27:54 $ * * Copyright (C) 2003 why the lucky stiff */ #include "syck.h" /* * Node allocation functions */ SyckNode * syck_alloc_node( enum syck_kind_tag type ) { SyckNode *s; s = S_ALLOC( SyckNode ); s->kind = type; s->id = 0; s->type_id = NULL; s->anchor = NULL; s->shortcut = NULL; return s; } void syck_free_node( SyckNode *n ) { syck_free_members( n ); if ( n->type_id != NULL ) { S_FREE( n->type_id ); n->type_id = NULL; } if ( n->anchor != NULL ) { S_FREE( n->anchor ); n->anchor = NULL; } S_FREE( n ); } SyckNode * syck_alloc_map() { SyckNode *n; struct SyckMap *m; m = S_ALLOC( struct SyckMap ); m->style = map_none; m->idx = 0; m->capa = ALLOC_CT; m->keys = S_ALLOC_N( SYMID, m->capa ); m->values = S_ALLOC_N( SYMID, m->capa ); n = syck_alloc_node( syck_map_kind ); n->data.pairs = m; return n; } SyckNode * syck_alloc_seq() { SyckNode *n; struct SyckSeq *s; s = S_ALLOC( struct SyckSeq ); s->style = seq_none; s->idx = 0; s->capa = ALLOC_CT; s->items = S_ALLOC_N( SYMID, s->capa ); n = syck_alloc_node( syck_seq_kind ); n->data.list = s; return n; } SyckNode * syck_alloc_str() { SyckNode *n; struct SyckStr *s; s = S_ALLOC( struct SyckStr ); s->len = 0; s->ptr = NULL; s->style = scalar_none; n = syck_alloc_node( syck_str_kind ); n->data.str = s; return n; } SyckNode * syck_new_str( char *str, enum scalar_style style ) { return syck_new_str2( str, strlen( str ), style ); } SyckNode * syck_new_str2( char *str, long len, enum scalar_style style ) { SyckNode *n; n = syck_alloc_str(); n->data.str->ptr = S_ALLOC_N( char, len + 1 ); n->data.str->len = len; n->data.str->style = style; memcpy( n->data.str->ptr, str, len ); n->data.str->ptr[len] = '\0'; return n; } void syck_replace_str( SyckNode *n, char *str, enum scalar_style style ) { return syck_replace_str2( n, str, strlen( str ), style ); } void syck_replace_str2( SyckNode *n, char *str, long len, enum scalar_style style ) { if ( n->data.str != NULL ) { S_FREE( n->data.str->ptr ); n->data.str->ptr = NULL; n->data.str->len = 0; } n->data.str->ptr = S_ALLOC_N( char, len + 1 ); n->data.str->len = len; n->data.str->style = style; memcpy( n->data.str->ptr, str, len ); n->data.str->ptr[len] = '\0'; } void syck_str_blow_away_commas( SyckNode *n ) { char *go, *end; go = n->data.str->ptr; end = go + n->data.str->len; while ( *(++go) != '\0' ) { if ( *go == ',' ) { n->data.str->len -= 1; memmove( go, go + 1, end - go ); end -= 1; } } } char * syck_str_read( SyckNode *n ) { ASSERT( n != NULL ); return n->data.str->ptr; } SyckNode * syck_new_map( SYMID key, SYMID value ) { SyckNode *n; n = syck_alloc_map(); syck_map_add( n, key, value ); return n; } void syck_map_empty( SyckNode *n ) { struct SyckMap *m; ASSERT( n != NULL ); ASSERT( n->data.list != NULL ); S_FREE( n->data.pairs->keys ); S_FREE( n->data.pairs->values ); m = n->data.pairs; m->idx = 0; m->capa = ALLOC_CT; m->keys = S_ALLOC_N( SYMID, m->capa ); m->values = S_ALLOC_N( SYMID, m->capa ); } void syck_map_add( SyckNode *map, SYMID key, SYMID value ) { struct 
SyckMap *m; long idx; ASSERT( map != NULL ); ASSERT( map->data.pairs != NULL ); m = map->data.pairs; idx = m->idx; m->idx += 1; if ( m->idx > m->capa ) { m->capa += ALLOC_CT; S_REALLOC_N( m->keys, SYMID, m->capa ); S_REALLOC_N( m->values, SYMID, m->capa ); } m->keys[idx] = key; m->values[idx] = value; } void syck_map_update( SyckNode *map1, SyckNode *map2 ) { struct SyckMap *m1, *m2; long new_idx, new_capa; ASSERT( map1 != NULL ); ASSERT( map2 != NULL ); m1 = map1->data.pairs; m2 = map2->data.pairs; if ( m2->idx < 1 ) return; new_idx = m1->idx; new_idx += m2->idx; new_capa = m1->capa; while ( new_idx > new_capa ) { new_capa += ALLOC_CT; } if ( new_capa > m1->capa ) { m1->capa = new_capa; S_REALLOC_N( m1->keys, SYMID, m1->capa ); S_REALLOC_N( m1->values, SYMID, m1->capa ); } for ( new_idx = 0; new_idx < m2->idx; m1->idx++, new_idx++ ) { m1->keys[m1->idx] = m2->keys[new_idx]; m1->values[m1->idx] = m2->values[new_idx]; } } long syck_map_count( SyckNode *map ) { ASSERT( map != NULL ); ASSERT( map->data.pairs != NULL ); return map->data.pairs->idx; } void syck_map_assign( SyckNode *map, enum map_part p, long idx, SYMID id ) { struct SyckMap *m; ASSERT( map != NULL ); m = map->data.pairs; ASSERT( m != NULL ); if ( p == map_key ) { m->keys[idx] = id; } else { m->values[idx] = id; } } SYMID syck_map_read( SyckNode *map, enum map_part p, long idx ) { struct SyckMap *m; ASSERT( map != NULL ); m = map->data.pairs; ASSERT( m != NULL ); if ( p == map_key ) { return m->keys[idx]; } else { return m->values[idx]; } } SyckNode * syck_new_seq( SYMID value ) { SyckNode *n; n = syck_alloc_seq(); syck_seq_add( n, value ); return n; } void syck_seq_empty( SyckNode *n ) { struct SyckSeq *s; ASSERT( n != NULL ); ASSERT( n->data.list != NULL ); S_FREE( n->data.list->items ); s = n->data.list; s->idx = 0; s->capa = ALLOC_CT; s->items = S_ALLOC_N( SYMID, s->capa ); } void syck_seq_add( SyckNode *arr, SYMID value ) { struct SyckSeq *s; long idx; ASSERT( arr != NULL ); ASSERT( arr->data.list != NULL ); s = arr->data.list; idx = s->idx; s->idx += 1; if ( s->idx > s->capa ) { s->capa += ALLOC_CT; S_REALLOC_N( s->items, SYMID, s->capa ); } s->items[idx] = value; } long syck_seq_count( SyckNode *seq ) { ASSERT( seq != NULL ); ASSERT( seq->data.list != NULL ); return seq->data.list->idx; } void syck_seq_assign( SyckNode *seq, long idx, SYMID id ) { struct SyckSeq *s; ASSERT( map != NULL ); s = seq->data.list; ASSERT( m != NULL ); s->items[idx] = id; } SYMID syck_seq_read( SyckNode *seq, long idx ) { struct SyckSeq *s; ASSERT( seq != NULL ); s = seq->data.list; ASSERT( s != NULL ); return s->items[idx]; } void syck_free_members( SyckNode *n ) { if ( n == NULL ) return; switch ( n->kind ) { case syck_str_kind: if ( n->data.str != NULL ) { S_FREE( n->data.str->ptr ); n->data.str->ptr = NULL; n->data.str->len = 0; S_FREE( n->data.str ); n->data.str = NULL; } break; case syck_seq_kind: if ( n->data.list != NULL ) { S_FREE( n->data.list->items ); S_FREE( n->data.list ); n->data.list = NULL; } break; case syck_map_kind: if ( n->data.pairs != NULL ) { S_FREE( n->data.pairs->keys ); S_FREE( n->data.pairs->values ); S_FREE( n->data.pairs ); n->data.pairs = NULL; } break; } } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/lib/syck.c0000644000000000000000000002356511672453175021760 0ustar rootroot/* * syck.c * * $Author: why $ * $Date: 2005/01/01 02:06:25 $ * * Copyright (C) 2003 why the lucky stiff */ #include #include #include "syck.h" void syck_parser_pop_level( SyckParser * ); /* * Custom assert */ void syck_assert( char 
*file_name, unsigned line_num ) { fflush( NULL ); fprintf( stderr, "\nAssertion failed: %s, line %u\n", file_name, line_num ); fflush( stderr ); abort(); } /* * Allocates and copies a string */ char * syck_strndup( char *buf, long len ) { char *new = S_ALLOC_N( char, len + 1 ); S_MEMZERO( new, char, len + 1 ); S_MEMCPY( new, buf, char, len ); return new; } /* * Default FILE IO function */ long syck_io_file_read( char *buf, SyckIoFile *file, long max_size, long skip ) { long len = 0; ASSERT( file != NULL ); max_size -= skip; len = fread( buf + skip, sizeof( char ), max_size, file->ptr ); len += skip; buf[len] = '\0'; return len; } /* * Default string IO function */ long syck_io_str_read( char *buf, SyckIoStr *str, long max_size, long skip ) { char *beg; long len = 0; ASSERT( str != NULL ); beg = str->ptr; if ( max_size >= 0 ) { max_size -= skip; if ( max_size <= 0 ) max_size = 0; else str->ptr += max_size; if ( str->ptr > str->end ) { str->ptr = str->end; } } else { /* Use exact string length */ while ( str->ptr < str->end ) { if (*(str->ptr++) == '\n') break; } } if ( beg < str->ptr ) { len = ( str->ptr - beg ); S_MEMCPY( buf + skip, beg, char, len ); } len += skip; buf[len] = '\0'; return len; } void syck_parser_reset_levels( SyckParser *p ) { while ( p->lvl_idx > 1 ) { syck_parser_pop_level( p ); } if ( p->lvl_idx < 1 ) { p->lvl_idx = 1; p->levels[0].spaces = -1; p->levels[0].ncount = 0; p->levels[0].domain = syck_strndup( "", 0 ); } p->levels[0].status = syck_lvl_header; } void syck_parser_reset_cursor( SyckParser *p ) { if ( p->buffer == NULL ) { p->buffer = S_ALLOC_N( char, p->bufsize ); S_MEMZERO( p->buffer, char, p->bufsize ); } p->buffer[0] = '\0'; p->cursor = NULL; p->lineptr = NULL; p->linectptr = NULL; p->token = NULL; p->toktmp = NULL; p->marker = NULL; p->limit = NULL; p->root = 0; p->root_on_error = 0; p->linect = 0; p->eof = 0; p->last_token = 0; p->force_token = 0; } /* * Value to return on a parse error */ void syck_parser_set_root_on_error( SyckParser *p, SYMID roer ) { p->root_on_error = roer; } /* * Allocate the parser */ SyckParser * syck_new_parser() { SyckParser *p; p = S_ALLOC( SyckParser ); S_MEMZERO( p, SyckParser, 1 ); p->lvl_capa = ALLOC_CT; p->levels = S_ALLOC_N( SyckLevel, p->lvl_capa ); p->input_type = syck_yaml_utf8; p->io_type = syck_io_str; p->io.str = NULL; p->syms = NULL; p->anchors = NULL; p->bad_anchors = NULL; p->implicit_typing = 1; p->taguri_expansion = 0; p->bufsize = SYCK_BUFFERSIZE; p->buffer = NULL; p->lvl_idx = 0; syck_parser_reset_levels( p ); return p; } int syck_add_sym( SyckParser *p, char *data ) { SYMID id = 0; if ( p->syms == NULL ) { p->syms = st_init_numtable(); } id = p->syms->num_entries + 1; st_insert( p->syms, id, (st_data_t)data ); return id; } int syck_lookup_sym( SyckParser *p, SYMID id, char **data ) { if ( p->syms == NULL ) return 0; return st_lookup( p->syms, id, (st_data_t *)data ); } int syck_st_free_nodes( char *key, SyckNode *n, char *arg ) { if ( n != (void *)1 ) syck_free_node( n ); n = NULL; return ST_CONTINUE; } void syck_st_free( SyckParser *p ) { /* * Free the anchor tables */ if ( p->anchors != NULL ) { st_foreach( p->anchors, syck_st_free_nodes, 0 ); st_free_table( p->anchors ); p->anchors = NULL; } if ( p->bad_anchors != NULL ) { st_foreach( p->bad_anchors, syck_st_free_nodes, 0 ); st_free_table( p->bad_anchors ); p->bad_anchors = NULL; } } void syck_free_parser( SyckParser *p ) { /* * Free the adhoc symbol table */ if ( p->syms != NULL ) { st_free_table( p->syms ); p->syms = NULL; } /* * Free tables, levels */ 
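    /*
     * Teardown order, as implemented below: release the anchor tables via
     * syck_st_free(), reset and free the indent levels, free the read
     * buffer if one was allocated, release any IO handle, then free the
     * parser struct itself.
     *
     * Sketch of the typical lifecycle ending in this call (illustrative
     * only; my_node_handler is a hypothetical caller-supplied
     * SyckNodeHandler callback):
     *
     *   SyckParser *p = syck_new_parser();
     *   syck_parser_handler( p, my_node_handler );
     *   syck_parser_str_auto( p, "--- [1, 2, 3]", NULL );
     *   SYMID root = syck_parse( p );
     *   syck_free_parser( p );
     */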
syck_st_free( p ); syck_parser_reset_levels( p ); S_FREE( p->levels[0].domain ); S_FREE( p->levels ); if ( p->buffer != NULL ) { S_FREE( p->buffer ); } free_any_io( p ); S_FREE( p ); } void syck_parser_handler( SyckParser *p, SyckNodeHandler hdlr ) { ASSERT( p != NULL ); p->handler = hdlr; } void syck_parser_implicit_typing( SyckParser *p, int flag ) { p->implicit_typing = ( flag == 0 ? 0 : 1 ); } void syck_parser_taguri_expansion( SyckParser *p, int flag ) { p->taguri_expansion = ( flag == 0 ? 0 : 1 ); } void syck_parser_error_handler( SyckParser *p, SyckErrorHandler hdlr ) { ASSERT( p != NULL ); p->error_handler = hdlr; } void syck_parser_bad_anchor_handler( SyckParser *p, SyckBadAnchorHandler hdlr ) { ASSERT( p != NULL ); p->bad_anchor_handler = hdlr; } void syck_parser_set_input_type( SyckParser *p, enum syck_parser_input input_type ) { ASSERT( p != NULL ); p->input_type = input_type; } void syck_parser_file( SyckParser *p, FILE *fp, SyckIoFileRead read ) { ASSERT( p != NULL ); free_any_io( p ); syck_parser_reset_cursor( p ); p->io_type = syck_io_file; p->io.file = S_ALLOC( SyckIoFile ); p->io.file->ptr = fp; if ( read != NULL ) { p->io.file->read = read; } else { p->io.file->read = syck_io_file_read; } } void syck_parser_str( SyckParser *p, char *ptr, long len, SyckIoStrRead read ) { ASSERT( p != NULL ); free_any_io( p ); syck_parser_reset_cursor( p ); p->io_type = syck_io_str; p->io.str = S_ALLOC( SyckIoStr ); p->io.str->beg = ptr; p->io.str->ptr = ptr; p->io.str->end = ptr + len; if ( read != NULL ) { p->io.str->read = read; } else { p->io.str->read = syck_io_str_read; } } void syck_parser_str_auto( SyckParser *p, char *ptr, SyckIoStrRead read ) { syck_parser_str( p, ptr, strlen( ptr ), read ); } SyckLevel * syck_parser_current_level( SyckParser *p ) { return &p->levels[p->lvl_idx-1]; } void syck_parser_pop_level( SyckParser *p ) { ASSERT( p != NULL ); /* The root level should never be popped */ if ( p->lvl_idx <= 1 ) return; p->lvl_idx -= 1; free( p->levels[p->lvl_idx].domain ); } void syck_parser_add_level( SyckParser *p, int len, enum syck_level_status status ) { ASSERT( p != NULL ); if ( p->lvl_idx + 1 > p->lvl_capa ) { p->lvl_capa += ALLOC_CT; S_REALLOC_N( p->levels, SyckLevel, p->lvl_capa ); } ASSERT( len > p->levels[p->lvl_idx-1].spaces ); p->levels[p->lvl_idx].spaces = len; p->levels[p->lvl_idx].ncount = 0; p->levels[p->lvl_idx].domain = syck_strndup( p->levels[p->lvl_idx-1].domain, strlen( p->levels[p->lvl_idx-1].domain ) ); p->levels[p->lvl_idx].status = status; p->lvl_idx += 1; } void free_any_io( SyckParser *p ) { ASSERT( p != NULL ); switch ( p->io_type ) { case syck_io_str: if ( p->io.str != NULL ) { S_FREE( p->io.str ); p->io.str = NULL; } break; case syck_io_file: if ( p->io.file != NULL ) { S_FREE( p->io.file ); p->io.file = NULL; } break; } } long syck_move_tokens( SyckParser *p ) { long count, skip; ASSERT( p->buffer != NULL ); if ( p->token == NULL ) return 0; skip = p->limit - p->token; if ( skip < 1 ) return 0; if ( ( count = p->token - p->buffer ) ) { S_MEMMOVE( p->buffer, p->token, char, skip ); p->token = p->buffer; p->marker -= count; p->cursor -= count; p->toktmp -= count; p->limit -= count; p->lineptr -= count; p->linectptr -= count; } return skip; } void syck_check_limit( SyckParser *p, long len ) { if ( p->cursor == NULL ) { p->cursor = p->buffer; p->lineptr = p->buffer; p->linectptr = p->buffer; p->marker = p->buffer; } p->limit = p->buffer + len; } long syck_parser_read( SyckParser *p ) { long len = 0; long skip = 0; ASSERT( p != NULL ); switch ( 
p->io_type ) { case syck_io_str: skip = syck_move_tokens( p ); len = (p->io.str->read)( p->buffer, p->io.str, SYCK_BUFFERSIZE - 1, skip ); break; case syck_io_file: skip = syck_move_tokens( p ); len = (p->io.file->read)( p->buffer, p->io.file, SYCK_BUFFERSIZE - 1, skip ); break; } syck_check_limit( p, len ); return len; } long syck_parser_readlen( SyckParser *p, long max_size ) { long len = 0; long skip = 0; ASSERT( p != NULL ); switch ( p->io_type ) { case syck_io_str: skip = syck_move_tokens( p ); len = (p->io.str->read)( p->buffer, p->io.str, max_size, skip ); break; case syck_io_file: skip = syck_move_tokens( p ); len = (p->io.file->read)( p->buffer, p->io.file, max_size, skip ); break; } syck_check_limit( p, len ); return len; } SYMID syck_parse( SyckParser *p ) { ASSERT( p != NULL ); syck_st_free( p ); syck_parser_reset_levels( p ); syckparse( p ); return p->root; } void syck_default_error_handler( SyckParser *p, char *msg ) { printf( "Error at [Line %d, Col %d]: %s\n", p->linect, p->cursor - p->lineptr, msg ); } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/lib/gram.output0000644000000000000000000014717211672453175023054 0ustar rootrootGrammar 0 $accept: doc $end 1 doc: atom 2 | YAML_DOCSEP atom_or_empty 3 | /* empty */ 4 atom: word_rep 5 | ind_rep 6 ind_rep: struct_rep 7 | YAML_TRANSFER ind_rep 8 | YAML_TAGURI ind_rep 9 | YAML_ANCHOR ind_rep 10 | indent_open ind_rep indent_flex_end 11 atom_or_empty: atom 12 | empty 13 empty: indent_open empty indent_end 14 | /* empty */ 15 | YAML_ITRANSFER empty 16 | YAML_TRANSFER empty 17 | YAML_TAGURI empty 18 | YAML_ANCHOR empty 19 indent_open: YAML_IOPEN 20 | indent_open YAML_INDENT 21 indent_end: YAML_IEND 22 indent_sep: YAML_INDENT 23 indent_flex_end: YAML_IEND 24 | indent_sep indent_flex_end 25 word_rep: YAML_TRANSFER word_rep 26 | YAML_TAGURI word_rep 27 | YAML_ITRANSFER word_rep 28 | YAML_ANCHOR word_rep 29 | YAML_ALIAS 30 | YAML_WORD 31 | YAML_PLAIN 32 | indent_open word_rep indent_flex_end 33 struct_rep: YAML_BLOCK 34 | implicit_seq 35 | inline_seq 36 | implicit_map 37 | inline_map 38 implicit_seq: indent_open top_imp_seq indent_end 39 | indent_open in_implicit_seq indent_end 40 basic_seq: '-' atom_or_empty 41 top_imp_seq: YAML_TRANSFER indent_sep in_implicit_seq 42 | YAML_TRANSFER top_imp_seq 43 | YAML_TAGURI indent_sep in_implicit_seq 44 | YAML_TAGURI top_imp_seq 45 | YAML_ANCHOR indent_sep in_implicit_seq 46 | YAML_ANCHOR top_imp_seq 47 in_implicit_seq: basic_seq 48 | in_implicit_seq indent_sep basic_seq 49 | in_implicit_seq indent_sep 50 inline_seq: '[' in_inline_seq ']' 51 | '[' ']' 52 in_inline_seq: inline_seq_atom 53 | in_inline_seq ',' inline_seq_atom 54 inline_seq_atom: atom 55 | basic_mapping 56 implicit_map: indent_open top_imp_map indent_end 57 | indent_open in_implicit_map indent_end 58 top_imp_map: YAML_TRANSFER indent_sep in_implicit_map 59 | YAML_TRANSFER top_imp_map 60 | YAML_TAGURI indent_sep in_implicit_map 61 | YAML_TAGURI top_imp_map 62 | YAML_ANCHOR indent_sep in_implicit_map 63 | YAML_ANCHOR top_imp_map 64 complex_key: word_rep 65 | '?' 
atom indent_sep 66 complex_value: atom_or_empty 67 complex_mapping: complex_key ':' complex_value 68 in_implicit_map: complex_mapping 69 | in_implicit_map indent_sep basic_seq 70 | in_implicit_map indent_sep complex_mapping 71 | in_implicit_map indent_sep 72 basic_mapping: atom ':' atom_or_empty 73 inline_map: '{' in_inline_map '}' 74 | '{' '}' 75 in_inline_map: inline_map_atom 76 | in_inline_map ',' inline_map_atom 77 inline_map_atom: atom 78 | basic_mapping Terminals, with rules where they appear $end (0) 0 ',' (44) 53 76 '-' (45) 40 ':' (58) 67 72 '?' (63) 65 '[' (91) 50 51 ']' (93) 50 51 '{' (123) 73 74 '}' (125) 73 74 error (256) YAML_ANCHOR (258) 9 18 28 45 46 62 63 YAML_ALIAS (259) 29 YAML_TRANSFER (260) 7 16 25 41 42 58 59 YAML_TAGURI (261) 8 17 26 43 44 60 61 YAML_ITRANSFER (262) 15 27 YAML_WORD (263) 30 YAML_PLAIN (264) 31 YAML_BLOCK (265) 33 YAML_DOCSEP (266) 2 YAML_IOPEN (267) 19 YAML_INDENT (268) 20 22 YAML_IEND (269) 21 23 Nonterminals, with rules where they appear $accept (23) on left: 0 doc (24) on left: 1 2 3, on right: 0 atom (25) on left: 4 5, on right: 1 11 54 65 72 77 ind_rep (26) on left: 6 7 8 9 10, on right: 5 7 8 9 10 atom_or_empty (27) on left: 11 12, on right: 2 40 66 72 empty (28) on left: 13 14 15 16 17 18, on right: 12 13 15 16 17 18 indent_open (29) on left: 19 20, on right: 10 13 20 32 38 39 56 57 indent_end (30) on left: 21, on right: 13 38 39 56 57 indent_sep (31) on left: 22, on right: 24 41 43 45 48 49 58 60 62 65 69 70 71 indent_flex_end (32) on left: 23 24, on right: 10 24 32 word_rep (33) on left: 25 26 27 28 29 30 31 32, on right: 4 25 26 27 28 32 64 struct_rep (34) on left: 33 34 35 36 37, on right: 6 implicit_seq (35) on left: 38 39, on right: 34 basic_seq (36) on left: 40, on right: 47 48 69 top_imp_seq (37) on left: 41 42 43 44 45 46, on right: 38 42 44 46 in_implicit_seq (38) on left: 47 48 49, on right: 39 41 43 45 48 49 inline_seq (39) on left: 50 51, on right: 35 in_inline_seq (40) on left: 52 53, on right: 50 53 inline_seq_atom (41) on left: 54 55, on right: 52 53 implicit_map (42) on left: 56 57, on right: 36 top_imp_map (43) on left: 58 59 60 61 62 63, on right: 56 59 61 63 complex_key (44) on left: 64 65, on right: 67 complex_value (45) on left: 66, on right: 67 complex_mapping (46) on left: 67, on right: 68 70 in_implicit_map (47) on left: 68 69 70 71, on right: 57 58 60 62 69 70 71 basic_mapping (48) on left: 72, on right: 55 78 inline_map (49) on left: 73 74, on right: 37 in_inline_map (50) on left: 75 76, on right: 73 76 inline_map_atom (51) on left: 77 78, on right: 75 76 state 0 0 $accept: . doc $end YAML_ANCHOR shift, and go to state 1 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 3 YAML_TAGURI shift, and go to state 4 YAML_ITRANSFER shift, and go to state 5 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_BLOCK shift, and go to state 8 YAML_DOCSEP shift, and go to state 9 YAML_IOPEN shift, and go to state 10 '[' shift, and go to state 11 '{' shift, and go to state 12 $default reduce using rule 3 (doc) doc go to state 13 atom go to state 14 ind_rep go to state 15 indent_open go to state 16 word_rep go to state 17 struct_rep go to state 18 implicit_seq go to state 19 inline_seq go to state 20 implicit_map go to state 21 inline_map go to state 22 state 1 9 ind_rep: YAML_ANCHOR . ind_rep 28 word_rep: YAML_ANCHOR . 
word_rep YAML_ANCHOR shift, and go to state 1 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 3 YAML_TAGURI shift, and go to state 4 YAML_ITRANSFER shift, and go to state 5 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_BLOCK shift, and go to state 8 YAML_IOPEN shift, and go to state 10 '[' shift, and go to state 11 '{' shift, and go to state 12 ind_rep go to state 23 indent_open go to state 16 word_rep go to state 24 struct_rep go to state 18 implicit_seq go to state 19 inline_seq go to state 20 implicit_map go to state 21 inline_map go to state 22 state 2 29 word_rep: YAML_ALIAS . $default reduce using rule 29 (word_rep) state 3 7 ind_rep: YAML_TRANSFER . ind_rep 25 word_rep: YAML_TRANSFER . word_rep YAML_ANCHOR shift, and go to state 1 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 3 YAML_TAGURI shift, and go to state 4 YAML_ITRANSFER shift, and go to state 5 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_BLOCK shift, and go to state 8 YAML_IOPEN shift, and go to state 10 '[' shift, and go to state 11 '{' shift, and go to state 12 ind_rep go to state 25 indent_open go to state 16 word_rep go to state 26 struct_rep go to state 18 implicit_seq go to state 19 inline_seq go to state 20 implicit_map go to state 21 inline_map go to state 22 state 4 8 ind_rep: YAML_TAGURI . ind_rep 26 word_rep: YAML_TAGURI . word_rep YAML_ANCHOR shift, and go to state 1 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 3 YAML_TAGURI shift, and go to state 4 YAML_ITRANSFER shift, and go to state 5 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_BLOCK shift, and go to state 8 YAML_IOPEN shift, and go to state 10 '[' shift, and go to state 11 '{' shift, and go to state 12 ind_rep go to state 27 indent_open go to state 16 word_rep go to state 28 struct_rep go to state 18 implicit_seq go to state 19 inline_seq go to state 20 implicit_map go to state 21 inline_map go to state 22 state 5 27 word_rep: YAML_ITRANSFER . word_rep YAML_ANCHOR shift, and go to state 29 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 30 YAML_TAGURI shift, and go to state 31 YAML_ITRANSFER shift, and go to state 5 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_IOPEN shift, and go to state 10 indent_open go to state 32 word_rep go to state 33 state 6 30 word_rep: YAML_WORD . $default reduce using rule 30 (word_rep) state 7 31 word_rep: YAML_PLAIN . $default reduce using rule 31 (word_rep) state 8 33 struct_rep: YAML_BLOCK . $default reduce using rule 33 (struct_rep) state 9 2 doc: YAML_DOCSEP . atom_or_empty YAML_ANCHOR shift, and go to state 34 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 35 YAML_TAGURI shift, and go to state 36 YAML_ITRANSFER shift, and go to state 37 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_BLOCK shift, and go to state 8 YAML_IOPEN shift, and go to state 10 '[' shift, and go to state 11 '{' shift, and go to state 12 $default reduce using rule 14 (empty) atom go to state 38 ind_rep go to state 15 atom_or_empty go to state 39 empty go to state 40 indent_open go to state 41 word_rep go to state 17 struct_rep go to state 18 implicit_seq go to state 19 inline_seq go to state 20 implicit_map go to state 21 inline_map go to state 22 state 10 19 indent_open: YAML_IOPEN . $default reduce using rule 19 (indent_open) state 11 50 inline_seq: '[' . 
in_inline_seq ']' 51 | '[' . ']' YAML_ANCHOR shift, and go to state 1 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 3 YAML_TAGURI shift, and go to state 4 YAML_ITRANSFER shift, and go to state 5 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_BLOCK shift, and go to state 8 YAML_IOPEN shift, and go to state 10 '[' shift, and go to state 11 ']' shift, and go to state 42 '{' shift, and go to state 12 atom go to state 43 ind_rep go to state 15 indent_open go to state 16 word_rep go to state 17 struct_rep go to state 18 implicit_seq go to state 19 inline_seq go to state 20 in_inline_seq go to state 44 inline_seq_atom go to state 45 implicit_map go to state 21 basic_mapping go to state 46 inline_map go to state 22 state 12 73 inline_map: '{' . in_inline_map '}' 74 | '{' . '}' YAML_ANCHOR shift, and go to state 1 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 3 YAML_TAGURI shift, and go to state 4 YAML_ITRANSFER shift, and go to state 5 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_BLOCK shift, and go to state 8 YAML_IOPEN shift, and go to state 10 '[' shift, and go to state 11 '{' shift, and go to state 12 '}' shift, and go to state 47 atom go to state 48 ind_rep go to state 15 indent_open go to state 16 word_rep go to state 17 struct_rep go to state 18 implicit_seq go to state 19 inline_seq go to state 20 implicit_map go to state 21 basic_mapping go to state 49 inline_map go to state 22 in_inline_map go to state 50 inline_map_atom go to state 51 state 13 0 $accept: doc . $end $end shift, and go to state 52 state 14 1 doc: atom . $default reduce using rule 1 (doc) state 15 5 atom: ind_rep . $default reduce using rule 5 (atom) state 16 10 ind_rep: indent_open . ind_rep indent_flex_end 20 indent_open: indent_open . YAML_INDENT 32 word_rep: indent_open . word_rep indent_flex_end 38 implicit_seq: indent_open . top_imp_seq indent_end 39 | indent_open . in_implicit_seq indent_end 56 implicit_map: indent_open . top_imp_map indent_end 57 | indent_open . in_implicit_map indent_end YAML_ANCHOR shift, and go to state 53 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 54 YAML_TAGURI shift, and go to state 55 YAML_ITRANSFER shift, and go to state 5 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_BLOCK shift, and go to state 8 YAML_IOPEN shift, and go to state 10 YAML_INDENT shift, and go to state 56 '-' shift, and go to state 57 '[' shift, and go to state 11 '{' shift, and go to state 12 '?' shift, and go to state 58 ind_rep go to state 59 indent_open go to state 16 word_rep go to state 60 struct_rep go to state 18 implicit_seq go to state 19 basic_seq go to state 61 top_imp_seq go to state 62 in_implicit_seq go to state 63 inline_seq go to state 20 implicit_map go to state 21 top_imp_map go to state 64 complex_key go to state 65 complex_mapping go to state 66 in_implicit_map go to state 67 inline_map go to state 22 state 17 4 atom: word_rep . $default reduce using rule 4 (atom) state 18 6 ind_rep: struct_rep . $default reduce using rule 6 (ind_rep) state 19 34 struct_rep: implicit_seq . $default reduce using rule 34 (struct_rep) state 20 35 struct_rep: inline_seq . $default reduce using rule 35 (struct_rep) state 21 36 struct_rep: implicit_map . $default reduce using rule 36 (struct_rep) state 22 37 struct_rep: inline_map . $default reduce using rule 37 (struct_rep) state 23 9 ind_rep: YAML_ANCHOR ind_rep . 
$default reduce using rule 9 (ind_rep) state 24 28 word_rep: YAML_ANCHOR word_rep . $default reduce using rule 28 (word_rep) state 25 7 ind_rep: YAML_TRANSFER ind_rep . $default reduce using rule 7 (ind_rep) state 26 25 word_rep: YAML_TRANSFER word_rep . $default reduce using rule 25 (word_rep) state 27 8 ind_rep: YAML_TAGURI ind_rep . $default reduce using rule 8 (ind_rep) state 28 26 word_rep: YAML_TAGURI word_rep . $default reduce using rule 26 (word_rep) state 29 28 word_rep: YAML_ANCHOR . word_rep YAML_ANCHOR shift, and go to state 29 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 30 YAML_TAGURI shift, and go to state 31 YAML_ITRANSFER shift, and go to state 5 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_IOPEN shift, and go to state 10 indent_open go to state 32 word_rep go to state 24 state 30 25 word_rep: YAML_TRANSFER . word_rep YAML_ANCHOR shift, and go to state 29 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 30 YAML_TAGURI shift, and go to state 31 YAML_ITRANSFER shift, and go to state 5 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_IOPEN shift, and go to state 10 indent_open go to state 32 word_rep go to state 26 state 31 26 word_rep: YAML_TAGURI . word_rep YAML_ANCHOR shift, and go to state 29 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 30 YAML_TAGURI shift, and go to state 31 YAML_ITRANSFER shift, and go to state 5 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_IOPEN shift, and go to state 10 indent_open go to state 32 word_rep go to state 28 state 32 20 indent_open: indent_open . YAML_INDENT 32 word_rep: indent_open . word_rep indent_flex_end YAML_ANCHOR shift, and go to state 29 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 30 YAML_TAGURI shift, and go to state 31 YAML_ITRANSFER shift, and go to state 5 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_IOPEN shift, and go to state 10 YAML_INDENT shift, and go to state 56 indent_open go to state 32 word_rep go to state 68 state 33 27 word_rep: YAML_ITRANSFER word_rep . $default reduce using rule 27 (word_rep) state 34 9 ind_rep: YAML_ANCHOR . ind_rep 18 empty: YAML_ANCHOR . empty 28 word_rep: YAML_ANCHOR . word_rep YAML_ANCHOR shift, and go to state 34 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 35 YAML_TAGURI shift, and go to state 36 YAML_ITRANSFER shift, and go to state 37 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_BLOCK shift, and go to state 8 YAML_IOPEN shift, and go to state 10 '[' shift, and go to state 11 '{' shift, and go to state 12 $default reduce using rule 14 (empty) ind_rep go to state 23 empty go to state 69 indent_open go to state 41 word_rep go to state 24 struct_rep go to state 18 implicit_seq go to state 19 inline_seq go to state 20 implicit_map go to state 21 inline_map go to state 22 state 35 7 ind_rep: YAML_TRANSFER . ind_rep 16 empty: YAML_TRANSFER . empty 25 word_rep: YAML_TRANSFER . 
word_rep YAML_ANCHOR shift, and go to state 34 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 35 YAML_TAGURI shift, and go to state 36 YAML_ITRANSFER shift, and go to state 37 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_BLOCK shift, and go to state 8 YAML_IOPEN shift, and go to state 10 '[' shift, and go to state 11 '{' shift, and go to state 12 $default reduce using rule 14 (empty) ind_rep go to state 25 empty go to state 70 indent_open go to state 41 word_rep go to state 26 struct_rep go to state 18 implicit_seq go to state 19 inline_seq go to state 20 implicit_map go to state 21 inline_map go to state 22 state 36 8 ind_rep: YAML_TAGURI . ind_rep 17 empty: YAML_TAGURI . empty 26 word_rep: YAML_TAGURI . word_rep YAML_ANCHOR shift, and go to state 34 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 35 YAML_TAGURI shift, and go to state 36 YAML_ITRANSFER shift, and go to state 37 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_BLOCK shift, and go to state 8 YAML_IOPEN shift, and go to state 10 '[' shift, and go to state 11 '{' shift, and go to state 12 $default reduce using rule 14 (empty) ind_rep go to state 27 empty go to state 71 indent_open go to state 41 word_rep go to state 28 struct_rep go to state 18 implicit_seq go to state 19 inline_seq go to state 20 implicit_map go to state 21 inline_map go to state 22 state 37 15 empty: YAML_ITRANSFER . empty 27 word_rep: YAML_ITRANSFER . word_rep YAML_ANCHOR shift, and go to state 72 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 73 YAML_TAGURI shift, and go to state 74 YAML_ITRANSFER shift, and go to state 37 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_IOPEN shift, and go to state 10 $default reduce using rule 14 (empty) empty go to state 75 indent_open go to state 76 word_rep go to state 33 state 38 11 atom_or_empty: atom . $default reduce using rule 11 (atom_or_empty) state 39 2 doc: YAML_DOCSEP atom_or_empty . $default reduce using rule 2 (doc) state 40 12 atom_or_empty: empty . $default reduce using rule 12 (atom_or_empty) state 41 10 ind_rep: indent_open . ind_rep indent_flex_end 13 empty: indent_open . empty indent_end 20 indent_open: indent_open . YAML_INDENT 32 word_rep: indent_open . word_rep indent_flex_end 38 implicit_seq: indent_open . top_imp_seq indent_end 39 | indent_open . in_implicit_seq indent_end 56 implicit_map: indent_open . top_imp_map indent_end 57 | indent_open . in_implicit_map indent_end YAML_ANCHOR shift, and go to state 77 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 78 YAML_TAGURI shift, and go to state 79 YAML_ITRANSFER shift, and go to state 37 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_BLOCK shift, and go to state 8 YAML_IOPEN shift, and go to state 10 YAML_INDENT shift, and go to state 56 '-' shift, and go to state 57 '[' shift, and go to state 11 '{' shift, and go to state 12 '?' 
shift, and go to state 58 $default reduce using rule 14 (empty) ind_rep go to state 59 empty go to state 80 indent_open go to state 41 word_rep go to state 60 struct_rep go to state 18 implicit_seq go to state 19 basic_seq go to state 61 top_imp_seq go to state 62 in_implicit_seq go to state 63 inline_seq go to state 20 implicit_map go to state 21 top_imp_map go to state 64 complex_key go to state 65 complex_mapping go to state 66 in_implicit_map go to state 67 inline_map go to state 22 state 42 51 inline_seq: '[' ']' . $default reduce using rule 51 (inline_seq) state 43 54 inline_seq_atom: atom . 72 basic_mapping: atom . ':' atom_or_empty ':' shift, and go to state 81 $default reduce using rule 54 (inline_seq_atom) state 44 50 inline_seq: '[' in_inline_seq . ']' 53 in_inline_seq: in_inline_seq . ',' inline_seq_atom ']' shift, and go to state 82 ',' shift, and go to state 83 state 45 52 in_inline_seq: inline_seq_atom . $default reduce using rule 52 (in_inline_seq) state 46 55 inline_seq_atom: basic_mapping . $default reduce using rule 55 (inline_seq_atom) state 47 74 inline_map: '{' '}' . $default reduce using rule 74 (inline_map) state 48 72 basic_mapping: atom . ':' atom_or_empty 77 inline_map_atom: atom . ':' shift, and go to state 81 $default reduce using rule 77 (inline_map_atom) state 49 78 inline_map_atom: basic_mapping . $default reduce using rule 78 (inline_map_atom) state 50 73 inline_map: '{' in_inline_map . '}' 76 in_inline_map: in_inline_map . ',' inline_map_atom '}' shift, and go to state 84 ',' shift, and go to state 85 state 51 75 in_inline_map: inline_map_atom . $default reduce using rule 75 (in_inline_map) state 52 0 $accept: doc $end . $default accept state 53 9 ind_rep: YAML_ANCHOR . ind_rep 28 word_rep: YAML_ANCHOR . word_rep 45 top_imp_seq: YAML_ANCHOR . indent_sep in_implicit_seq 46 | YAML_ANCHOR . top_imp_seq 62 top_imp_map: YAML_ANCHOR . indent_sep in_implicit_map 63 | YAML_ANCHOR . top_imp_map YAML_ANCHOR shift, and go to state 53 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 54 YAML_TAGURI shift, and go to state 55 YAML_ITRANSFER shift, and go to state 5 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_BLOCK shift, and go to state 8 YAML_IOPEN shift, and go to state 10 YAML_INDENT shift, and go to state 86 '[' shift, and go to state 11 '{' shift, and go to state 12 ind_rep go to state 23 indent_open go to state 16 indent_sep go to state 87 word_rep go to state 24 struct_rep go to state 18 implicit_seq go to state 19 top_imp_seq go to state 88 inline_seq go to state 20 implicit_map go to state 21 top_imp_map go to state 89 inline_map go to state 22 state 54 7 ind_rep: YAML_TRANSFER . ind_rep 25 word_rep: YAML_TRANSFER . word_rep 41 top_imp_seq: YAML_TRANSFER . indent_sep in_implicit_seq 42 | YAML_TRANSFER . top_imp_seq 58 top_imp_map: YAML_TRANSFER . indent_sep in_implicit_map 59 | YAML_TRANSFER . 
top_imp_map YAML_ANCHOR shift, and go to state 53 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 54 YAML_TAGURI shift, and go to state 55 YAML_ITRANSFER shift, and go to state 5 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_BLOCK shift, and go to state 8 YAML_IOPEN shift, and go to state 10 YAML_INDENT shift, and go to state 86 '[' shift, and go to state 11 '{' shift, and go to state 12 ind_rep go to state 25 indent_open go to state 16 indent_sep go to state 90 word_rep go to state 26 struct_rep go to state 18 implicit_seq go to state 19 top_imp_seq go to state 91 inline_seq go to state 20 implicit_map go to state 21 top_imp_map go to state 92 inline_map go to state 22 state 55 8 ind_rep: YAML_TAGURI . ind_rep 26 word_rep: YAML_TAGURI . word_rep 43 top_imp_seq: YAML_TAGURI . indent_sep in_implicit_seq 44 | YAML_TAGURI . top_imp_seq 60 top_imp_map: YAML_TAGURI . indent_sep in_implicit_map 61 | YAML_TAGURI . top_imp_map YAML_ANCHOR shift, and go to state 53 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 54 YAML_TAGURI shift, and go to state 55 YAML_ITRANSFER shift, and go to state 5 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_BLOCK shift, and go to state 8 YAML_IOPEN shift, and go to state 10 YAML_INDENT shift, and go to state 86 '[' shift, and go to state 11 '{' shift, and go to state 12 ind_rep go to state 27 indent_open go to state 16 indent_sep go to state 93 word_rep go to state 28 struct_rep go to state 18 implicit_seq go to state 19 top_imp_seq go to state 94 inline_seq go to state 20 implicit_map go to state 21 top_imp_map go to state 95 inline_map go to state 22 state 56 20 indent_open: indent_open YAML_INDENT . $default reduce using rule 20 (indent_open) state 57 40 basic_seq: '-' . atom_or_empty YAML_ANCHOR shift, and go to state 34 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 35 YAML_TAGURI shift, and go to state 36 YAML_ITRANSFER shift, and go to state 37 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_BLOCK shift, and go to state 8 YAML_IOPEN shift, and go to state 10 '[' shift, and go to state 11 '{' shift, and go to state 12 $default reduce using rule 14 (empty) atom go to state 38 ind_rep go to state 15 atom_or_empty go to state 96 empty go to state 40 indent_open go to state 41 word_rep go to state 17 struct_rep go to state 18 implicit_seq go to state 19 inline_seq go to state 20 implicit_map go to state 21 inline_map go to state 22 state 58 65 complex_key: '?' . atom indent_sep YAML_ANCHOR shift, and go to state 1 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 3 YAML_TAGURI shift, and go to state 4 YAML_ITRANSFER shift, and go to state 5 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_BLOCK shift, and go to state 8 YAML_IOPEN shift, and go to state 10 '[' shift, and go to state 11 '{' shift, and go to state 12 atom go to state 97 ind_rep go to state 15 indent_open go to state 16 word_rep go to state 17 struct_rep go to state 18 implicit_seq go to state 19 inline_seq go to state 20 implicit_map go to state 21 inline_map go to state 22 state 59 10 ind_rep: indent_open ind_rep . indent_flex_end YAML_INDENT shift, and go to state 86 YAML_IEND shift, and go to state 98 indent_sep go to state 99 indent_flex_end go to state 100 state 60 32 word_rep: indent_open word_rep . indent_flex_end 64 complex_key: word_rep . 
YAML_INDENT shift, and go to state 86 YAML_IEND shift, and go to state 98 $default reduce using rule 64 (complex_key) indent_sep go to state 99 indent_flex_end go to state 101 state 61 47 in_implicit_seq: basic_seq . $default reduce using rule 47 (in_implicit_seq) state 62 38 implicit_seq: indent_open top_imp_seq . indent_end YAML_IEND shift, and go to state 102 indent_end go to state 103 state 63 39 implicit_seq: indent_open in_implicit_seq . indent_end 48 in_implicit_seq: in_implicit_seq . indent_sep basic_seq 49 | in_implicit_seq . indent_sep YAML_INDENT shift, and go to state 86 YAML_IEND shift, and go to state 102 indent_end go to state 104 indent_sep go to state 105 state 64 56 implicit_map: indent_open top_imp_map . indent_end YAML_IEND shift, and go to state 102 indent_end go to state 106 state 65 67 complex_mapping: complex_key . ':' complex_value ':' shift, and go to state 107 state 66 68 in_implicit_map: complex_mapping . $default reduce using rule 68 (in_implicit_map) state 67 57 implicit_map: indent_open in_implicit_map . indent_end 69 in_implicit_map: in_implicit_map . indent_sep basic_seq 70 | in_implicit_map . indent_sep complex_mapping 71 | in_implicit_map . indent_sep YAML_INDENT shift, and go to state 86 YAML_IEND shift, and go to state 102 indent_end go to state 108 indent_sep go to state 109 state 68 32 word_rep: indent_open word_rep . indent_flex_end YAML_INDENT shift, and go to state 86 YAML_IEND shift, and go to state 98 indent_sep go to state 99 indent_flex_end go to state 101 state 69 18 empty: YAML_ANCHOR empty . $default reduce using rule 18 (empty) state 70 16 empty: YAML_TRANSFER empty . $default reduce using rule 16 (empty) state 71 17 empty: YAML_TAGURI empty . $default reduce using rule 17 (empty) state 72 18 empty: YAML_ANCHOR . empty 28 word_rep: YAML_ANCHOR . word_rep YAML_ANCHOR shift, and go to state 72 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 73 YAML_TAGURI shift, and go to state 74 YAML_ITRANSFER shift, and go to state 37 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_IOPEN shift, and go to state 10 $default reduce using rule 14 (empty) empty go to state 69 indent_open go to state 76 word_rep go to state 24 state 73 16 empty: YAML_TRANSFER . empty 25 word_rep: YAML_TRANSFER . word_rep YAML_ANCHOR shift, and go to state 72 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 73 YAML_TAGURI shift, and go to state 74 YAML_ITRANSFER shift, and go to state 37 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_IOPEN shift, and go to state 10 $default reduce using rule 14 (empty) empty go to state 70 indent_open go to state 76 word_rep go to state 26 state 74 17 empty: YAML_TAGURI . empty 26 word_rep: YAML_TAGURI . word_rep YAML_ANCHOR shift, and go to state 72 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 73 YAML_TAGURI shift, and go to state 74 YAML_ITRANSFER shift, and go to state 37 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_IOPEN shift, and go to state 10 $default reduce using rule 14 (empty) empty go to state 71 indent_open go to state 76 word_rep go to state 28 state 75 15 empty: YAML_ITRANSFER empty . $default reduce using rule 15 (empty) state 76 13 empty: indent_open . empty indent_end 20 indent_open: indent_open . YAML_INDENT 32 word_rep: indent_open . 
word_rep indent_flex_end YAML_ANCHOR shift, and go to state 72 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 73 YAML_TAGURI shift, and go to state 74 YAML_ITRANSFER shift, and go to state 37 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_IOPEN shift, and go to state 10 YAML_INDENT shift, and go to state 56 $default reduce using rule 14 (empty) empty go to state 80 indent_open go to state 76 word_rep go to state 68 state 77 9 ind_rep: YAML_ANCHOR . ind_rep 18 empty: YAML_ANCHOR . empty 28 word_rep: YAML_ANCHOR . word_rep 45 top_imp_seq: YAML_ANCHOR . indent_sep in_implicit_seq 46 | YAML_ANCHOR . top_imp_seq 62 top_imp_map: YAML_ANCHOR . indent_sep in_implicit_map 63 | YAML_ANCHOR . top_imp_map YAML_ANCHOR shift, and go to state 77 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 78 YAML_TAGURI shift, and go to state 79 YAML_ITRANSFER shift, and go to state 37 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_BLOCK shift, and go to state 8 YAML_IOPEN shift, and go to state 10 YAML_INDENT shift, and go to state 86 '[' shift, and go to state 11 '{' shift, and go to state 12 $default reduce using rule 14 (empty) ind_rep go to state 23 empty go to state 69 indent_open go to state 41 indent_sep go to state 87 word_rep go to state 24 struct_rep go to state 18 implicit_seq go to state 19 top_imp_seq go to state 88 inline_seq go to state 20 implicit_map go to state 21 top_imp_map go to state 89 inline_map go to state 22 state 78 7 ind_rep: YAML_TRANSFER . ind_rep 16 empty: YAML_TRANSFER . empty 25 word_rep: YAML_TRANSFER . word_rep 41 top_imp_seq: YAML_TRANSFER . indent_sep in_implicit_seq 42 | YAML_TRANSFER . top_imp_seq 58 top_imp_map: YAML_TRANSFER . indent_sep in_implicit_map 59 | YAML_TRANSFER . top_imp_map YAML_ANCHOR shift, and go to state 77 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 78 YAML_TAGURI shift, and go to state 79 YAML_ITRANSFER shift, and go to state 37 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_BLOCK shift, and go to state 8 YAML_IOPEN shift, and go to state 10 YAML_INDENT shift, and go to state 86 '[' shift, and go to state 11 '{' shift, and go to state 12 $default reduce using rule 14 (empty) ind_rep go to state 25 empty go to state 70 indent_open go to state 41 indent_sep go to state 90 word_rep go to state 26 struct_rep go to state 18 implicit_seq go to state 19 top_imp_seq go to state 91 inline_seq go to state 20 implicit_map go to state 21 top_imp_map go to state 92 inline_map go to state 22 state 79 8 ind_rep: YAML_TAGURI . ind_rep 17 empty: YAML_TAGURI . empty 26 word_rep: YAML_TAGURI . word_rep 43 top_imp_seq: YAML_TAGURI . indent_sep in_implicit_seq 44 | YAML_TAGURI . top_imp_seq 60 top_imp_map: YAML_TAGURI . indent_sep in_implicit_map 61 | YAML_TAGURI . 
top_imp_map YAML_ANCHOR shift, and go to state 77 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 78 YAML_TAGURI shift, and go to state 79 YAML_ITRANSFER shift, and go to state 37 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_BLOCK shift, and go to state 8 YAML_IOPEN shift, and go to state 10 YAML_INDENT shift, and go to state 86 '[' shift, and go to state 11 '{' shift, and go to state 12 $default reduce using rule 14 (empty) ind_rep go to state 27 empty go to state 71 indent_open go to state 41 indent_sep go to state 93 word_rep go to state 28 struct_rep go to state 18 implicit_seq go to state 19 top_imp_seq go to state 94 inline_seq go to state 20 implicit_map go to state 21 top_imp_map go to state 95 inline_map go to state 22 state 80 13 empty: indent_open empty . indent_end YAML_IEND shift, and go to state 102 indent_end go to state 110 state 81 72 basic_mapping: atom ':' . atom_or_empty YAML_ANCHOR shift, and go to state 34 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 35 YAML_TAGURI shift, and go to state 36 YAML_ITRANSFER shift, and go to state 37 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_BLOCK shift, and go to state 8 YAML_IOPEN shift, and go to state 10 '[' shift, and go to state 11 '{' shift, and go to state 12 $default reduce using rule 14 (empty) atom go to state 38 ind_rep go to state 15 atom_or_empty go to state 111 empty go to state 40 indent_open go to state 41 word_rep go to state 17 struct_rep go to state 18 implicit_seq go to state 19 inline_seq go to state 20 implicit_map go to state 21 inline_map go to state 22 state 82 50 inline_seq: '[' in_inline_seq ']' . $default reduce using rule 50 (inline_seq) state 83 53 in_inline_seq: in_inline_seq ',' . inline_seq_atom YAML_ANCHOR shift, and go to state 1 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 3 YAML_TAGURI shift, and go to state 4 YAML_ITRANSFER shift, and go to state 5 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_BLOCK shift, and go to state 8 YAML_IOPEN shift, and go to state 10 '[' shift, and go to state 11 '{' shift, and go to state 12 atom go to state 43 ind_rep go to state 15 indent_open go to state 16 word_rep go to state 17 struct_rep go to state 18 implicit_seq go to state 19 inline_seq go to state 20 inline_seq_atom go to state 112 implicit_map go to state 21 basic_mapping go to state 46 inline_map go to state 22 state 84 73 inline_map: '{' in_inline_map '}' . $default reduce using rule 73 (inline_map) state 85 76 in_inline_map: in_inline_map ',' . inline_map_atom YAML_ANCHOR shift, and go to state 1 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 3 YAML_TAGURI shift, and go to state 4 YAML_ITRANSFER shift, and go to state 5 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_BLOCK shift, and go to state 8 YAML_IOPEN shift, and go to state 10 '[' shift, and go to state 11 '{' shift, and go to state 12 atom go to state 48 ind_rep go to state 15 indent_open go to state 16 word_rep go to state 17 struct_rep go to state 18 implicit_seq go to state 19 inline_seq go to state 20 implicit_map go to state 21 basic_mapping go to state 49 inline_map go to state 22 inline_map_atom go to state 113 state 86 22 indent_sep: YAML_INDENT . $default reduce using rule 22 (indent_sep) state 87 45 top_imp_seq: YAML_ANCHOR indent_sep . in_implicit_seq 62 top_imp_map: YAML_ANCHOR indent_sep . 
in_implicit_map YAML_ANCHOR shift, and go to state 29 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 30 YAML_TAGURI shift, and go to state 31 YAML_ITRANSFER shift, and go to state 5 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_IOPEN shift, and go to state 10 '-' shift, and go to state 57 '?' shift, and go to state 58 indent_open go to state 32 word_rep go to state 114 basic_seq go to state 61 in_implicit_seq go to state 115 complex_key go to state 65 complex_mapping go to state 66 in_implicit_map go to state 116 state 88 46 top_imp_seq: YAML_ANCHOR top_imp_seq . $default reduce using rule 46 (top_imp_seq) state 89 63 top_imp_map: YAML_ANCHOR top_imp_map . $default reduce using rule 63 (top_imp_map) state 90 41 top_imp_seq: YAML_TRANSFER indent_sep . in_implicit_seq 58 top_imp_map: YAML_TRANSFER indent_sep . in_implicit_map YAML_ANCHOR shift, and go to state 29 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 30 YAML_TAGURI shift, and go to state 31 YAML_ITRANSFER shift, and go to state 5 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_IOPEN shift, and go to state 10 '-' shift, and go to state 57 '?' shift, and go to state 58 indent_open go to state 32 word_rep go to state 114 basic_seq go to state 61 in_implicit_seq go to state 117 complex_key go to state 65 complex_mapping go to state 66 in_implicit_map go to state 118 state 91 42 top_imp_seq: YAML_TRANSFER top_imp_seq . $default reduce using rule 42 (top_imp_seq) state 92 59 top_imp_map: YAML_TRANSFER top_imp_map . $default reduce using rule 59 (top_imp_map) state 93 43 top_imp_seq: YAML_TAGURI indent_sep . in_implicit_seq 60 top_imp_map: YAML_TAGURI indent_sep . in_implicit_map YAML_ANCHOR shift, and go to state 29 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 30 YAML_TAGURI shift, and go to state 31 YAML_ITRANSFER shift, and go to state 5 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_IOPEN shift, and go to state 10 '-' shift, and go to state 57 '?' shift, and go to state 58 indent_open go to state 32 word_rep go to state 114 basic_seq go to state 61 in_implicit_seq go to state 119 complex_key go to state 65 complex_mapping go to state 66 in_implicit_map go to state 120 state 94 44 top_imp_seq: YAML_TAGURI top_imp_seq . $default reduce using rule 44 (top_imp_seq) state 95 61 top_imp_map: YAML_TAGURI top_imp_map . $default reduce using rule 61 (top_imp_map) state 96 40 basic_seq: '-' atom_or_empty . $default reduce using rule 40 (basic_seq) state 97 65 complex_key: '?' atom . indent_sep YAML_INDENT shift, and go to state 86 indent_sep go to state 121 state 98 23 indent_flex_end: YAML_IEND . $default reduce using rule 23 (indent_flex_end) state 99 24 indent_flex_end: indent_sep . indent_flex_end YAML_INDENT shift, and go to state 86 YAML_IEND shift, and go to state 98 indent_sep go to state 99 indent_flex_end go to state 122 state 100 10 ind_rep: indent_open ind_rep indent_flex_end . $default reduce using rule 10 (ind_rep) state 101 32 word_rep: indent_open word_rep indent_flex_end . $default reduce using rule 32 (word_rep) state 102 21 indent_end: YAML_IEND . $default reduce using rule 21 (indent_end) state 103 38 implicit_seq: indent_open top_imp_seq indent_end . $default reduce using rule 38 (implicit_seq) state 104 39 implicit_seq: indent_open in_implicit_seq indent_end . 
$default reduce using rule 39 (implicit_seq) state 105 48 in_implicit_seq: in_implicit_seq indent_sep . basic_seq 49 | in_implicit_seq indent_sep . '-' shift, and go to state 57 $default reduce using rule 49 (in_implicit_seq) basic_seq go to state 123 state 106 56 implicit_map: indent_open top_imp_map indent_end . $default reduce using rule 56 (implicit_map) state 107 67 complex_mapping: complex_key ':' . complex_value YAML_ANCHOR shift, and go to state 34 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 35 YAML_TAGURI shift, and go to state 36 YAML_ITRANSFER shift, and go to state 37 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_BLOCK shift, and go to state 8 YAML_IOPEN shift, and go to state 10 '[' shift, and go to state 11 '{' shift, and go to state 12 $default reduce using rule 14 (empty) atom go to state 38 ind_rep go to state 15 atom_or_empty go to state 124 empty go to state 40 indent_open go to state 41 word_rep go to state 17 struct_rep go to state 18 implicit_seq go to state 19 inline_seq go to state 20 implicit_map go to state 21 complex_value go to state 125 inline_map go to state 22 state 108 57 implicit_map: indent_open in_implicit_map indent_end . $default reduce using rule 57 (implicit_map) state 109 69 in_implicit_map: in_implicit_map indent_sep . basic_seq 70 | in_implicit_map indent_sep . complex_mapping 71 | in_implicit_map indent_sep . YAML_ANCHOR shift, and go to state 29 YAML_ALIAS shift, and go to state 2 YAML_TRANSFER shift, and go to state 30 YAML_TAGURI shift, and go to state 31 YAML_ITRANSFER shift, and go to state 5 YAML_WORD shift, and go to state 6 YAML_PLAIN shift, and go to state 7 YAML_IOPEN shift, and go to state 10 '-' shift, and go to state 57 '?' shift, and go to state 58 $default reduce using rule 71 (in_implicit_map) indent_open go to state 32 word_rep go to state 114 basic_seq go to state 126 complex_key go to state 65 complex_mapping go to state 127 state 110 13 empty: indent_open empty indent_end . $default reduce using rule 13 (empty) state 111 72 basic_mapping: atom ':' atom_or_empty . $default reduce using rule 72 (basic_mapping) state 112 53 in_inline_seq: in_inline_seq ',' inline_seq_atom . $default reduce using rule 53 (in_inline_seq) state 113 76 in_inline_map: in_inline_map ',' inline_map_atom . $default reduce using rule 76 (in_inline_map) state 114 64 complex_key: word_rep . $default reduce using rule 64 (complex_key) state 115 45 top_imp_seq: YAML_ANCHOR indent_sep in_implicit_seq . 48 in_implicit_seq: in_implicit_seq . indent_sep basic_seq 49 | in_implicit_seq . indent_sep YAML_INDENT shift, and go to state 86 $default reduce using rule 45 (top_imp_seq) indent_sep go to state 105 state 116 62 top_imp_map: YAML_ANCHOR indent_sep in_implicit_map . 69 in_implicit_map: in_implicit_map . indent_sep basic_seq 70 | in_implicit_map . indent_sep complex_mapping 71 | in_implicit_map . indent_sep YAML_INDENT shift, and go to state 86 $default reduce using rule 62 (top_imp_map) indent_sep go to state 109 state 117 41 top_imp_seq: YAML_TRANSFER indent_sep in_implicit_seq . 48 in_implicit_seq: in_implicit_seq . indent_sep basic_seq 49 | in_implicit_seq . indent_sep YAML_INDENT shift, and go to state 86 $default reduce using rule 41 (top_imp_seq) indent_sep go to state 105 state 118 58 top_imp_map: YAML_TRANSFER indent_sep in_implicit_map . 69 in_implicit_map: in_implicit_map . indent_sep basic_seq 70 | in_implicit_map . indent_sep complex_mapping 71 | in_implicit_map . 
indent_sep YAML_INDENT shift, and go to state 86 $default reduce using rule 58 (top_imp_map) indent_sep go to state 109 state 119 43 top_imp_seq: YAML_TAGURI indent_sep in_implicit_seq . 48 in_implicit_seq: in_implicit_seq . indent_sep basic_seq 49 | in_implicit_seq . indent_sep YAML_INDENT shift, and go to state 86 $default reduce using rule 43 (top_imp_seq) indent_sep go to state 105 state 120 60 top_imp_map: YAML_TAGURI indent_sep in_implicit_map . 69 in_implicit_map: in_implicit_map . indent_sep basic_seq 70 | in_implicit_map . indent_sep complex_mapping 71 | in_implicit_map . indent_sep YAML_INDENT shift, and go to state 86 $default reduce using rule 60 (top_imp_map) indent_sep go to state 109 state 121 65 complex_key: '?' atom indent_sep . $default reduce using rule 65 (complex_key) state 122 24 indent_flex_end: indent_sep indent_flex_end . $default reduce using rule 24 (indent_flex_end) state 123 48 in_implicit_seq: in_implicit_seq indent_sep basic_seq . $default reduce using rule 48 (in_implicit_seq) state 124 66 complex_value: atom_or_empty . $default reduce using rule 66 (complex_value) state 125 67 complex_mapping: complex_key ':' complex_value . $default reduce using rule 67 (complex_mapping) state 126 69 in_implicit_map: in_implicit_map indent_sep basic_seq . $default reduce using rule 69 (in_implicit_map) state 127 70 in_implicit_map: in_implicit_map indent_sep complex_mapping . $default reduce using rule 70 (in_implicit_map) ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/lib/Makefile.am0000644000000000000000000000072111672453175022664 0ustar rootrootINCLUDES = -I$(top_srcdir) LEXLIB = @LEXLIB@ AM_YFLAGS = -d -t -v -p syck lib_LIBRARIES = libsyck.a include_HEADERS = syck.h syck_st.h libsyck_a_SOURCES = \ emitter.c \ handler.c \ node.c \ syck.c \ syck_st.c \ gram.y \ bytecode.re \ yaml2byte.c \ token.re \ implicit.re # libsyck_a_LIBADD = $(LEXLIB) REC = re2c .re.c: $(REC) $< > $@.new && mv $@.new $@ ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/lib/gram.h0000644000000000000000000000432611672453175021734 0ustar rootroot/* A Bison parser, made by GNU Bison 1.875d. */ /* Skeleton parser for Yacc-like parsing with Bison, Copyright (C) 1984, 1989, 1990, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */ /* As a special exception, when this file is copied by Bison into a Bison output file, you may use that output file without restriction. This special exception was added by the Free Software Foundation in version 1.24 of Bison. */ /* Tokens. */ #ifndef YYTOKENTYPE # define YYTOKENTYPE /* Put the tokens into the symbol table, so that GDB and other debuggers know about them. 
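   These codes are the terminal symbols shared by the Bison-generated parser
   and the lexer generated from token.re by re2c: the lexer returns one of
   them for each scanned token, and the YYSTYPE union further down carries the
   token's semantic value as either a symbol-table id (SYMID), a SyckNode
   pointer, or a C string (typically an anchor, alias or tag name).  The
   #define block that follows repeats the same numeric values for code that
   does not use the enum.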
*/ enum yytokentype { YAML_ANCHOR = 258, YAML_ALIAS = 259, YAML_TRANSFER = 260, YAML_TAGURI = 261, YAML_ITRANSFER = 262, YAML_WORD = 263, YAML_PLAIN = 264, YAML_BLOCK = 265, YAML_DOCSEP = 266, YAML_IOPEN = 267, YAML_INDENT = 268, YAML_IEND = 269 }; #endif #define YAML_ANCHOR 258 #define YAML_ALIAS 259 #define YAML_TRANSFER 260 #define YAML_TAGURI 261 #define YAML_ITRANSFER 262 #define YAML_WORD 263 #define YAML_PLAIN 264 #define YAML_BLOCK 265 #define YAML_DOCSEP 266 #define YAML_IOPEN 267 #define YAML_INDENT 268 #define YAML_IEND 269 #if ! defined (YYSTYPE) && ! defined (YYSTYPE_IS_DECLARED) #line 35 "gram.y" typedef union YYSTYPE { SYMID nodeId; SyckNode *nodeData; char *name; } YYSTYPE; /* Line 1285 of yacc.c. */ #line 71 "gram.h" # define yystype YYSTYPE /* obsolescent; will be withdrawn */ # define YYSTYPE_IS_DECLARED 1 # define YYSTYPE_IS_TRIVIAL 1 #endif ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/lib/syck_st.c0000644000000000000000000002545311672453175022464 0ustar rootroot/* This is a public domain general purpose hash table package written by Peter Moore @ UCB. */ /* static char sccsid[] = "@(#) st.c 5.1 89/12/14 Crucible"; */ #include "config.h" #include #include #include "syck_st.h" #ifdef NT #include #endif #define SIZE32 4 #if SIZEOF_LONG == SIZE32 typedef long I32; typedef unsigned long U32; #define NUM2I32(x) NUM2LONG(x) #define NUM2U32(x) NUM2ULONG(x) #elif SIZEOF_INT == SIZE32 typedef int I32; typedef unsigned int U32; #define NUM2I32(x) NUM2INT(x) #define NUM2U32(x) NUM2UINT(x) #endif typedef struct st_table_entry st_table_entry; struct st_table_entry { unsigned int hash; char *key; char *record; st_table_entry *next; }; #define ST_DEFAULT_MAX_DENSITY 5 #define ST_DEFAULT_INIT_TABLE_SIZE 11 /* * DEFAULT_MAX_DENSITY is the default for the largest we allow the * average number of items per bin before increasing the number of * bins * * DEFAULT_INIT_TABLE_SIZE is the default for the number of bins * allocated initially * */ static int numcmp(); static int numhash(); static struct st_hash_type type_numhash = { numcmp, numhash, }; extern int strcmp(); static int strhash(); static struct st_hash_type type_strhash = { strcmp, strhash, }; static void rehash(); #define alloc(type) (type*)malloc((unsigned)sizeof(type)) #define Calloc(n,s) (char*)calloc((n),(s)) #define EQUAL(table,x,y) ((x)==(y) || (*table->type->compare)((x),(y)) == 0) #define do_hash(key,table) (unsigned int)(*(table)->type->hash)((key)) #define do_hash_bin(key,table) (do_hash(key, table)%(table)->num_bins) /* * MINSIZE is the minimum size of a dictionary. */ #define MINSIZE 8 /* Table of prime numbers 2^n+a, 2<=n<=30. 
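 * Each entry is a candidate bin count: new_size() rounds a requested size up
 * to one of these primes, and rehash() (triggered from ADD_DIRECT once the
 * average chain length exceeds ST_DEFAULT_MAX_DENSITY) allocates a larger
 * bin array and redistributes every entry.
 *
 * Sketch of the public API declared in syck_st.h (illustrative only; the
 * names record and do_something stand in for caller-supplied data):
 *
 *     st_table *t = st_init_strtable();
 *     char *rec;
 *     st_insert(t, "key", (char *)record);
 *     if (st_lookup(t, "key", &rec)) do_something(rec);
 *     st_free_table(t);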
*/ static long primes[] = { 8 + 3, 16 + 3, 32 + 5, 64 + 3, 128 + 3, 256 + 27, 512 + 9, 1024 + 9, 2048 + 5, 4096 + 3, 8192 + 27, 16384 + 43, 32768 + 3, 65536 + 45, 131072 + 29, 262144 + 3, 524288 + 21, 1048576 + 7, 2097152 + 17, 4194304 + 15, 8388608 + 9, 16777216 + 43, 33554432 + 35, 67108864 + 15, 134217728 + 29, 268435456 + 3, 536870912 + 11, 1073741824 + 85, 0 }; static int new_size(size) int size; { int i; #if 0 for (i=3; i<31; i++) { if ((1< size) return 1< size) return primes[i]; } /* Ran out of polynomials */ return -1; /* should raise exception */ #endif } #ifdef HASH_LOG static int collision = 0; static int init_st = 0; static void stat_col() { FILE *f = fopen("/tmp/col", "w"); fprintf(f, "collision: %d\n", collision); fclose(f); } #endif st_table* st_init_table_with_size(type, size) struct st_hash_type *type; int size; { st_table *tbl; #ifdef HASH_LOG if (init_st == 0) { init_st = 1; atexit(stat_col); } #endif size = new_size(size); /* round up to prime number */ tbl = alloc(st_table); tbl->type = type; tbl->num_entries = 0; tbl->num_bins = size; tbl->bins = (st_table_entry **)Calloc(size, sizeof(st_table_entry*)); return tbl; } st_table* st_init_table(type) struct st_hash_type *type; { return st_init_table_with_size(type, 0); } st_table* st_init_numtable() { return st_init_table(&type_numhash); } st_table* st_init_numtable_with_size(size) int size; { return st_init_table_with_size(&type_numhash, size); } st_table* st_init_strtable() { return st_init_table(&type_strhash); } st_table* st_init_strtable_with_size(size) int size; { return st_init_table_with_size(&type_strhash, size); } void st_free_table(table) st_table *table; { register st_table_entry *ptr, *next; int i; for(i = 0; i < table->num_bins; i++) { ptr = table->bins[i]; while (ptr != 0) { next = ptr->next; free(ptr); ptr = next; } } free(table->bins); free(table); } #define PTR_NOT_EQUAL(table, ptr, hash_val, key) \ ((ptr) != 0 && (ptr->hash != (hash_val) || !EQUAL((table), (key), (ptr)->key))) #ifdef HASH_LOG #define COLLISION collision++ #else #define COLLISION #endif #define FIND_ENTRY(table, ptr, hash_val, bin_pos) do {\ bin_pos = hash_val%(table)->num_bins;\ ptr = (table)->bins[bin_pos];\ if (PTR_NOT_EQUAL(table, ptr, hash_val, key)) {\ COLLISION;\ while (PTR_NOT_EQUAL(table, ptr->next, hash_val, key)) {\ ptr = ptr->next;\ }\ ptr = ptr->next;\ }\ } while (0) int st_lookup(table, key, value) st_table *table; register char *key; char **value; { unsigned int hash_val, bin_pos; register st_table_entry *ptr; hash_val = do_hash(key, table); FIND_ENTRY(table, ptr, hash_val, bin_pos); if (ptr == 0) { return 0; } else { if (value != 0) *value = ptr->record; return 1; } } #define ADD_DIRECT(table, key, value, hash_val, bin_pos)\ do {\ st_table_entry *entry;\ if (table->num_entries/(table->num_bins) > ST_DEFAULT_MAX_DENSITY) {\ rehash(table);\ bin_pos = hash_val % table->num_bins;\ }\ \ entry = alloc(st_table_entry);\ \ entry->hash = hash_val;\ entry->key = key;\ entry->record = value;\ entry->next = table->bins[bin_pos];\ table->bins[bin_pos] = entry;\ table->num_entries++;\ } while (0) int st_insert(table, key, value) register st_table *table; register char *key; char *value; { unsigned int hash_val, bin_pos; register st_table_entry *ptr; hash_val = do_hash(key, table); FIND_ENTRY(table, ptr, hash_val, bin_pos); if (ptr == 0) { ADD_DIRECT(table, key, value, hash_val, bin_pos); return 0; } else { ptr->record = value; return 1; } } void st_add_direct(table, key, value) st_table *table; char *key; char *value; { unsigned int 
hash_val, bin_pos; hash_val = do_hash(key, table); bin_pos = hash_val % table->num_bins; ADD_DIRECT(table, key, value, hash_val, bin_pos); } static void rehash(table) register st_table *table; { register st_table_entry *ptr, *next, **new_bins; int i, old_num_bins = table->num_bins, new_num_bins; unsigned int hash_val; new_num_bins = new_size(old_num_bins+1); new_bins = (st_table_entry**)Calloc(new_num_bins, sizeof(st_table_entry*)); for(i = 0; i < old_num_bins; i++) { ptr = table->bins[i]; while (ptr != 0) { next = ptr->next; hash_val = ptr->hash % new_num_bins; ptr->next = new_bins[hash_val]; new_bins[hash_val] = ptr; ptr = next; } } free(table->bins); table->num_bins = new_num_bins; table->bins = new_bins; } st_table* st_copy(old_table) st_table *old_table; { st_table *new_table; st_table_entry *ptr, *entry; int i, num_bins = old_table->num_bins; new_table = alloc(st_table); if (new_table == 0) { return 0; } *new_table = *old_table; new_table->bins = (st_table_entry**) Calloc((unsigned)num_bins, sizeof(st_table_entry*)); if (new_table->bins == 0) { free(new_table); return 0; } for(i = 0; i < num_bins; i++) { new_table->bins[i] = 0; ptr = old_table->bins[i]; while (ptr != 0) { entry = alloc(st_table_entry); if (entry == 0) { free(new_table->bins); free(new_table); return 0; } *entry = *ptr; entry->next = new_table->bins[i]; new_table->bins[i] = entry; ptr = ptr->next; } } return new_table; } int st_delete(table, key, value) register st_table *table; register char **key; char **value; { unsigned int hash_val; st_table_entry *tmp; register st_table_entry *ptr; hash_val = do_hash_bin(*key, table); ptr = table->bins[hash_val]; if (ptr == 0) { if (value != 0) *value = 0; return 0; } if (EQUAL(table, *key, ptr->key)) { table->bins[hash_val] = ptr->next; table->num_entries--; if (value != 0) *value = ptr->record; *key = ptr->key; free(ptr); return 1; } for(; ptr->next != 0; ptr = ptr->next) { if (EQUAL(table, ptr->next->key, *key)) { tmp = ptr->next; ptr->next = ptr->next->next; table->num_entries--; if (value != 0) *value = tmp->record; *key = tmp->key; free(tmp); return 1; } } return 0; } int st_delete_safe(table, key, value, never) register st_table *table; register char **key; char **value; char *never; { unsigned int hash_val; register st_table_entry *ptr; hash_val = do_hash_bin(*key, table); ptr = table->bins[hash_val]; if (ptr == 0) { if (value != 0) *value = 0; return 0; } for(; ptr != 0; ptr = ptr->next) { if ((ptr->key != never) && EQUAL(table, ptr->key, *key)) { table->num_entries--; *key = ptr->key; if (value != 0) *value = ptr->record; ptr->key = ptr->record = never; return 1; } } return 0; } static int delete_never(key, value, never) char *key, *value, *never; { if (value == never) return ST_DELETE; return ST_CONTINUE; } void st_cleanup_safe(table, never) st_table *table; char *never; { int num_entries = table->num_entries; st_foreach(table, delete_never, never); table->num_entries = num_entries; } void st_foreach(table, func, arg) st_table *table; enum st_retval (*func)(); char *arg; { st_table_entry *ptr, *last, *tmp; enum st_retval retval; int i; for(i = 0; i < table->num_bins; i++) { last = 0; for(ptr = table->bins[i]; ptr != 0;) { retval = (*func)(ptr->key, ptr->record, arg); switch (retval) { case ST_CONTINUE: last = ptr; ptr = ptr->next; break; case ST_STOP: return; case ST_DELETE: tmp = ptr; if (last == 0) { table->bins[i] = ptr->next; } else { last->next = ptr->next; } ptr = ptr->next; free(tmp); table->num_entries--; } } } } static int strhash(string) register char 
*string; { register int c; #ifdef HASH_ELFHASH register unsigned int h = 0, g; while ((c = *string++) != '\0') { h = ( h << 4 ) + c; if ( g = h & 0xF0000000 ) h ^= g >> 24; h &= ~g; } return h; #elif HASH_PERL register int val = 0; while ((c = *string++) != '\0') { val = val*33 + c; } return val + (val>>5); #elif HASH_JENKINS register const unsigned char *s_PeRlHaSh = (const unsigned char *)string; register U32 hash_PeRlHaSh = 0; while ((c = *s_PeRlHaSh++) != '\0') { hash_PeRlHaSh += c; hash_PeRlHaSh += (hash_PeRlHaSh << 10); hash_PeRlHaSh ^= (hash_PeRlHaSh >> 6); } hash_PeRlHaSh += (hash_PeRlHaSh << 3); hash_PeRlHaSh ^= (hash_PeRlHaSh >> 11); return (hash_PeRlHaSh + (hash_PeRlHaSh << 15)); #else register int val = 0; while ((c = *string++) != '\0') { val = val*997 + c; } return val + (val>>5); #endif } static int numcmp(x, y) long x, y; { return x != y; } static int numhash(n) long n; { return n; } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/lib/gram.y0000644000000000000000000003211111672453175021746 0ustar rootroot/* * gram.y * * $Author: why $ * $Date: 2005/04/13 06:27:54 $ * * Copyright (C) 2003 why the lucky stiff */ %start doc %pure-parser %{ #include "syck.h" void apply_seq_in_map( SyckParser *parser, SyckNode *n ); #define YYPARSE_PARAM parser #define YYLEX_PARAM parser #define NULL_NODE(parser, node) \ SyckNode *node = syck_new_str( "", scalar_plain ); \ if ( ((SyckParser *)parser)->taguri_expansion == 1 ) \ { \ node->type_id = syck_taguri( YAML_DOMAIN, "null", 4 ); \ } \ else \ { \ node->type_id = syck_strndup( "null", 4 ); \ } %} %union { SYMID nodeId; SyckNode *nodeData; char *name; }; %token YAML_ANCHOR YAML_ALIAS YAML_TRANSFER YAML_TAGURI YAML_ITRANSFER %token YAML_WORD YAML_PLAIN YAML_BLOCK %token YAML_DOCSEP YAML_IOPEN YAML_INDENT YAML_IEND %type doc basic_seq %type atom word_rep ind_rep struct_rep atom_or_empty empty %type implicit_seq inline_seq implicit_map inline_map inline_seq_atom inline_map_atom %type top_imp_seq in_implicit_seq in_inline_seq basic_mapping complex_key complex_value %type top_imp_map in_implicit_map in_inline_map complex_mapping %left '-' ':' %left '[' ']' '{' '}' ',' '?' %% doc : atom { ((SyckParser *)parser)->root = syck_hdlr_add_node( (SyckParser *)parser, $1 ); } | YAML_DOCSEP atom_or_empty { ((SyckParser *)parser)->root = syck_hdlr_add_node( (SyckParser *)parser, $2 ); } | { ((SyckParser *)parser)->eof = 1; } ; atom : word_rep | ind_rep ; ind_rep : struct_rep | YAML_TRANSFER ind_rep { syck_add_transfer( $1, $2, ((SyckParser *)parser)->taguri_expansion ); $$ = $2; } | YAML_TAGURI ind_rep { syck_add_transfer( $1, $2, 0 ); $$ = $2; } | YAML_ANCHOR ind_rep { /* * _Anchors_: The language binding must keep a separate symbol table * for anchors. The actual ID in the symbol table is returned to the * higher nodes, though. */ $$ = syck_hdlr_add_anchor( (SyckParser *)parser, $1, $2 ); } | indent_open ind_rep indent_flex_end { $$ = $2; } ; atom_or_empty : atom | empty ; empty : indent_open empty indent_end { $$ = $2; } | { NULL_NODE( parser, n ); $$ = n; } | YAML_ITRANSFER empty { if ( ((SyckParser *)parser)->implicit_typing == 1 ) { try_tag_implicit( $2, ((SyckParser *)parser)->taguri_expansion ); } $$ = $2; } | YAML_TRANSFER empty { syck_add_transfer( $1, $2, ((SyckParser *)parser)->taguri_expansion ); $$ = $2; } | YAML_TAGURI empty { syck_add_transfer( $1, $2, 0 ); $$ = $2; } | YAML_ANCHOR empty { /* * _Anchors_: The language binding must keep a separate symbol table * for anchors. 
The actual ID in the symbol table is returned to the * higher nodes, though. */ $$ = syck_hdlr_add_anchor( (SyckParser *)parser, $1, $2 ); } ; /* * Indentation abstractions */ indent_open : YAML_IOPEN | indent_open YAML_INDENT ; indent_end : YAML_IEND ; indent_sep : YAML_INDENT ; indent_flex_end : YAML_IEND | indent_sep indent_flex_end ; /* * Words are broken out to distinguish them * as keys in implicit maps and valid elements * for the inline structures */ word_rep : YAML_TRANSFER word_rep { syck_add_transfer( $1, $2, ((SyckParser *)parser)->taguri_expansion ); $$ = $2; } | YAML_TAGURI word_rep { syck_add_transfer( $1, $2, 0 ); $$ = $2; } | YAML_ITRANSFER word_rep { if ( ((SyckParser *)parser)->implicit_typing == 1 ) { try_tag_implicit( $2, ((SyckParser *)parser)->taguri_expansion ); } $$ = $2; } | YAML_ANCHOR word_rep { $$ = syck_hdlr_add_anchor( (SyckParser *)parser, $1, $2 ); } | YAML_ALIAS { /* * _Aliases_: The anchor symbol table is scanned for the anchor name. * The anchor's ID in the language's symbol table is returned. */ $$ = syck_hdlr_get_anchor( (SyckParser *)parser, $1 ); } | YAML_WORD { SyckNode *n = $1; if ( ((SyckParser *)parser)->taguri_expansion == 1 ) { n->type_id = syck_taguri( YAML_DOMAIN, "str", 3 ); } else { n->type_id = syck_strndup( "str", 3 ); } $$ = n; } | YAML_PLAIN | indent_open word_rep indent_flex_end { $$ = $2; } ; /* * Any of these structures can be used as * complex keys */ struct_rep : YAML_BLOCK | implicit_seq | inline_seq | implicit_map | inline_map ; /* * Implicit sequence */ implicit_seq : indent_open top_imp_seq indent_end { $$ = $2; } | indent_open in_implicit_seq indent_end { $$ = $2; } ; basic_seq : '-' atom_or_empty { $$ = syck_hdlr_add_node( (SyckParser *)parser, $2 ); } ; top_imp_seq : YAML_TRANSFER indent_sep in_implicit_seq { syck_add_transfer( $1, $3, ((SyckParser *)parser)->taguri_expansion ); $$ = $3; } | YAML_TRANSFER top_imp_seq { syck_add_transfer( $1, $2, ((SyckParser *)parser)->taguri_expansion ); $$ = $2; } | YAML_TAGURI indent_sep in_implicit_seq { syck_add_transfer( $1, $3, 0 ); $$ = $3; } | YAML_TAGURI top_imp_seq { syck_add_transfer( $1, $2, 0 ); $$ = $2; } | YAML_ANCHOR indent_sep in_implicit_seq { $$ = syck_hdlr_add_anchor( (SyckParser *)parser, $1, $3 ); } | YAML_ANCHOR top_imp_seq { $$ = syck_hdlr_add_anchor( (SyckParser *)parser, $1, $2 ); } ; in_implicit_seq : basic_seq { $$ = syck_new_seq( $1 ); } | in_implicit_seq indent_sep basic_seq { syck_seq_add( $1, $3 ); $$ = $1; } | in_implicit_seq indent_sep { $$ = $1; } ; /* * Inline sequences */ inline_seq : '[' in_inline_seq ']' { $$ = $2; } | '[' ']' { $$ = syck_alloc_seq(); } ; in_inline_seq : inline_seq_atom { $$ = syck_new_seq( syck_hdlr_add_node( (SyckParser *)parser, $1 ) ); } | in_inline_seq ',' inline_seq_atom { syck_seq_add( $1, syck_hdlr_add_node( (SyckParser *)parser, $3 ) ); $$ = $1; } ; inline_seq_atom : atom | basic_mapping ; /* * Implicit maps */ implicit_map : indent_open top_imp_map indent_end { apply_seq_in_map( (SyckParser *)parser, $2 ); $$ = $2; } | indent_open in_implicit_map indent_end { apply_seq_in_map( (SyckParser *)parser, $2 ); $$ = $2; } ; top_imp_map : YAML_TRANSFER indent_sep in_implicit_map { syck_add_transfer( $1, $3, ((SyckParser *)parser)->taguri_expansion ); $$ = $3; } | YAML_TRANSFER top_imp_map { syck_add_transfer( $1, $2, ((SyckParser *)parser)->taguri_expansion ); $$ = $2; } | YAML_TAGURI indent_sep in_implicit_map { syck_add_transfer( $1, $3, 0 ); $$ = $3; } | YAML_TAGURI top_imp_map { syck_add_transfer( $1, $2, 0 ); $$ = $2; } | 
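/*
 * The top_imp_seq and top_imp_map rules allow a leading YAML_TRANSFER,
 * YAML_TAGURI or YAML_ANCHOR prefix, separated from the body by an
 * indent_sep, to wrap an implicit block sequence or mapping.  The prefixes
 * may also stack: each recursive alternative tags or anchors the result of
 * the inner rule and passes the node through unchanged.
 */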
YAML_ANCHOR indent_sep in_implicit_map { $$ = syck_hdlr_add_anchor( (SyckParser *)parser, $1, $3 ); } | YAML_ANCHOR top_imp_map { $$ = syck_hdlr_add_anchor( (SyckParser *)parser, $1, $2 ); } ; complex_key : word_rep | '?' atom indent_sep { $$ = $2; } ; complex_value : atom_or_empty ; /* Default needs to be added to SyckSeq i think... | '=' ':' atom { result = [ :DEFAULT, val[2] ] } */ complex_mapping : complex_key ':' complex_value { $$ = syck_new_map( syck_hdlr_add_node( (SyckParser *)parser, $1 ), syck_hdlr_add_node( (SyckParser *)parser, $3 ) ); } /* | '?' atom { NULL_NODE( parser, n ); $$ = syck_new_map( syck_hdlr_add_node( (SyckParser *)parser, $2 ), syck_hdlr_add_node( (SyckParser *)parser, n ) ); } */ ; in_implicit_map : complex_mapping | in_implicit_map indent_sep basic_seq { if ( $1->shortcut == NULL ) { $1->shortcut = syck_new_seq( $3 ); } else { syck_seq_add( $1->shortcut, $3 ); } $$ = $1; } | in_implicit_map indent_sep complex_mapping { apply_seq_in_map( (SyckParser *)parser, $1 ); syck_map_update( $1, $3 ); syck_free_node( $3 ); $3 = NULL; $$ = $1; } | in_implicit_map indent_sep { $$ = $1; } ; /* * Inline maps */ basic_mapping : atom ':' atom_or_empty { $$ = syck_new_map( syck_hdlr_add_node( (SyckParser *)parser, $1 ), syck_hdlr_add_node( (SyckParser *)parser, $3 ) ); } ; inline_map : '{' in_inline_map '}' { $$ = $2; } | '{' '}' { $$ = syck_alloc_map(); } ; in_inline_map : inline_map_atom | in_inline_map ',' inline_map_atom { syck_map_update( $1, $3 ); syck_free_node( $3 ); $3 = NULL; $$ = $1; } ; inline_map_atom : atom { NULL_NODE( parser, n ); $$ = syck_new_map( syck_hdlr_add_node( (SyckParser *)parser, $1 ), syck_hdlr_add_node( (SyckParser *)parser, n ) ); } | basic_mapping ; %% void apply_seq_in_map( SyckParser *parser, SyckNode *n ) { long map_len; if ( n->shortcut == NULL ) { return; } map_len = syck_map_count( n ); syck_map_assign( n, map_value, map_len - 1, syck_hdlr_add_node( parser, n->shortcut ) ); n->shortcut = NULL; } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/lib/syck_st.h0000644000000000000000000000203111672453175022454 0ustar rootroot/* This is a public domain general purpose hash table package written by Peter Moore @ UCB. */ /* @(#) st.h 5.1 89/12/14 */ #ifndef ST_INCLUDED #define ST_INCLUDED typedef struct st_table st_table; struct st_hash_type { int (*compare)(); int (*hash)(); }; struct st_table { struct st_hash_type *type; int num_bins; int num_entries; struct st_table_entry **bins; }; #define st_is_member(table,key) st_lookup(table,key,(char **)0) enum st_retval {ST_CONTINUE, ST_STOP, ST_DELETE}; st_table *st_init_table(); st_table *st_init_table_with_size(); st_table *st_init_numtable(); st_table *st_init_numtable_with_size(); st_table *st_init_strtable(); st_table *st_init_strtable_with_size(); int st_delete(), st_delete_safe(); int st_insert(), st_lookup(); void st_foreach(), st_add_direct(), st_free_table(), st_cleanup_safe(); st_table *st_copy(); #define ST_NUMCMP ((int (*)()) 0) #define ST_NUMHASH ((int (*)()) -2) #define st_numcmp ST_NUMCMP #define st_numhash ST_NUMHASH int st_strhash(); #endif /* ST_INCLUDED */ ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/lib/yaml2byte.c0000644000000000000000000001645411672453175022716 0ustar rootroot/* * yaml2byte.c * * $Author: why $ * $Date: 2004/08/02 17:32:35 $ * * Copyright (C) 2003 why the lucky stiff, clark evans * * WARNING WARNING WARNING --- THIS IS *NOT JUST* PLAYING * ANYMORE! -- WHY HAS EMBRACED THIS AS THE REAL THING! 
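 *
 * Overview: syck_yaml2byte_handler() is installed as the parser's node
 * handler.  For each SyckNode it writes the node's anchor and transfer (if
 * any) followed by a scalar, sequence or mapping record into a growable
 * bytestring_t buffer; nodes whose anchor has already been printed are
 * re-emitted as aliases by bytestring_extend().  syck_yaml2byte() wires a
 * parser to this handler, parses the given YAML string and returns the
 * accumulated bytecode prefixed with a document marker ("D" plus a newline).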
*/ #include #include #define YAMLBYTE_UTF8 #include "yamlbyte.h" #include #define TRACE0(a) \ do { printf(a); printf("\n"); fflush(stdout); } while(0) #define TRACE1(a,b) \ do { printf(a,b); printf("\n"); fflush(stdout); } while(0) #define TRACE2(a,b,c) \ do { printf(a,b,c); printf("\n"); fflush(stdout); } while(0) #define TRACE3(a,b,c,d) \ do { printf(a,b,c,d); printf("\n"); fflush(stdout); } while(0) /* Reinvent the wheel... */ #define CHUNKSIZE 64 #define HASH ((long)0xCAFECAFE) typedef struct { long hash; char *buffer; long length; long remaining; int printed; } bytestring_t; bytestring_t *bytestring_alloc() { bytestring_t *ret; /*TRACE0("bytestring_alloc()");*/ ret = S_ALLOC(bytestring_t); ret->hash = HASH; ret->length = CHUNKSIZE; ret->remaining = ret->length; ret->buffer = S_ALLOC_N(char, ret->length + 1 ); ret->buffer[0] = 0; ret->printed = 0; return ret; } void bytestring_append(bytestring_t *str, char code, char *start, char *finish) { long grow; long length = 2; /* CODE + LF */ char *curr; assert(str && HASH == str->hash); /*TRACE0("bytestring_append()");*/ if(start) { if(!finish) finish = start + strlen(start); length += (finish-start); } if(length > str->remaining) { grow = (length - str->remaining) + CHUNKSIZE; str->remaining += grow; str->length += grow; str->buffer = S_REALLOC_N( str->buffer, char, str->length + 1 ); assert(str->buffer); } curr = str->buffer + (str->length - str->remaining); *curr = code; curr += 1; if(start) while(start < finish) *curr ++ = *start ++; *curr = '\n'; curr += 1; *curr = 0; str->remaining = str->remaining - length; assert( (str->buffer + str->length) - str->remaining ); } void bytestring_extend(bytestring_t *str, bytestring_t *ext) { char *from; char *curr; char *stop; long grow; long length; assert(str && HASH == str->hash); assert(ext && HASH == ext->hash); if(ext->printed) { assert(ext->buffer[0] ==YAMLBYTE_ANCHOR); curr = ext->buffer; while( '\n' != *curr) curr++; bytestring_append(str, YAMLBYTE_ALIAS, ext->buffer + 1, curr); } else { ext->printed = 1; length = (ext->length - ext->remaining); if(length > str->remaining) { grow = (length - str->remaining) + CHUNKSIZE; str->remaining += grow; str->length += grow; str->buffer = S_REALLOC_N( str->buffer, char, str->length + 1 ); } curr = str->buffer + (str->length - str->remaining); from = ext->buffer; stop = ext->buffer + length; while( from < stop ) *curr ++ = *from ++; *curr = 0; str->remaining = str->remaining - length; assert( (str->buffer + str->length) - str->remaining ); } } /* convert SyckNode into yamlbyte_buffer_t objects */ SYMID syck_yaml2byte_handler(p, n) SyckParser *p; SyckNode *n; { SYMID oid; long i; char ch; char nextcode; char *start; char *current; char *finish; bytestring_t *val = NULL; bytestring_t *sav = NULL; /*TRACE0("syck_yaml2byte_handler()");*/ val = bytestring_alloc(); if(n->anchor) bytestring_append(val,YAMLBYTE_ANCHOR, n->anchor, NULL); if ( n->type_id ) { if ( p->taguri_expansion ) { bytestring_append(val,YAMLBYTE_TRANSFER, n->type_id, NULL); } else { char *type_tag = S_ALLOC_N( char, strlen( n->type_id ) + 1 ); type_tag[0] = '\0'; strcat( type_tag, "!" 
); strcat( type_tag, n->type_id ); bytestring_append( val, YAMLBYTE_TRANSFER, type_tag, NULL); } } switch (n->kind) { case syck_str_kind: nextcode = YAMLBYTE_SCALAR; start = n->data.str->ptr; finish = start + n->data.str->len - 1; current = start; /*TRACE2("SCALAR: %s %d", start, n->data.str->len); */ while(1) { ch = *current; if('\n' == ch || 0 == ch || current > finish) { if(current >= start) { bytestring_append(val, nextcode, start, current); nextcode = YAMLBYTE_CONTINUE; } start = current + 1; if(current > finish) { break; } else if('\n' == ch ) { bytestring_append(val,YAMLBYTE_NEWLINE,NULL,NULL); } else if(0 == ch) { bytestring_append(val,YAMLBYTE_NULLCHAR,NULL,NULL); } else { assert("oops"); } } current += 1; } break; case syck_seq_kind: bytestring_append(val,YAMLBYTE_SEQUENCE,NULL,NULL); for ( i = 0; i < n->data.list->idx; i++ ) { oid = syck_seq_read( n, i ); syck_lookup_sym( p, oid, (char **)&sav ); bytestring_extend(val, sav); } bytestring_append(val,YAMLBYTE_END_BRANCH,NULL,NULL); break; case syck_map_kind: bytestring_append(val,YAMLBYTE_MAPPING,NULL,NULL); for ( i = 0; i < n->data.pairs->idx; i++ ) { oid = syck_map_read( n, map_key, i ); syck_lookup_sym( p, oid, (char **)&sav ); bytestring_extend(val, sav); oid = syck_map_read( n, map_value, i ); syck_lookup_sym( p, oid, (char **)&sav ); bytestring_extend(val, sav); } bytestring_append(val,YAMLBYTE_END_BRANCH,NULL,NULL); break; } oid = syck_add_sym( p, (char *) val ); /*TRACE1("Saving: %s", val->buffer );*/ return oid; } char * syck_yaml2byte(char *yamlstr) { SYMID oid; char *ret; bytestring_t *sav; SyckParser *parser = syck_new_parser(); syck_parser_str_auto( parser, yamlstr, NULL ); syck_parser_handler( parser, syck_yaml2byte_handler ); syck_parser_error_handler( parser, NULL ); syck_parser_implicit_typing( parser, 1 ); syck_parser_taguri_expansion( parser, 1 ); oid = syck_parse( parser ); syck_lookup_sym( parser, oid, (char **)&sav ); ret = S_ALLOC_N( char, strlen( sav->buffer ) + 3 ); ret[0] = '\0'; strcat( ret, "D\n" ); strcat( ret, sav->buffer ); syck_free_parser( parser ); return ret; } #ifdef TEST_YBEXT #include int main() { char *yaml = "test: 1\nand: \"with new\\nline\\n\"\nalso: &3 three\nmore: *3"; printf("--- # YAML \n"); printf(yaml); printf("\n...\n"); printf(syck_yaml2byte(yaml)); return 0; } #endif ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/lib/Makefile.in0000644000000000000000000003630311672453175022702 0ustar rootroot# Makefile.in generated by automake 1.9.5 from Makefile.am. # @configure_input@ # Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, # 2003, 2004, 2005 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ SOURCES = $(libsyck_a_SOURCES) srcdir = @srcdir@ top_srcdir = @top_srcdir@ VPATH = @srcdir@ pkgdatadir = $(datadir)/@PACKAGE@ pkglibdir = $(libdir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ top_builddir = .. 
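# Summary of the rules below: the files in libsyck_a_SOURCES are compiled and
# archived into libsyck.a; gram.y is run through $(YACC) with
# AM_YFLAGS = -d -t -v -p syck (emitting gram.h, the gram.output state
# listing, and symbols prefixed with "syck"); the *.re sources are converted
# to C by re2c through the .re.c suffix rule at the end of this file.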
am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd INSTALL = @INSTALL@ install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : subdir = lib DIST_COMMON = $(include_HEADERS) $(srcdir)/Makefile.am \ $(srcdir)/Makefile.in gram.c gram.h ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/configure.in am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = `echo $$p | sed -e 's|^.*/||'`; am__installdirs = "$(DESTDIR)$(libdir)" "$(DESTDIR)$(includedir)" libLIBRARIES_INSTALL = $(INSTALL_DATA) LIBRARIES = $(lib_LIBRARIES) AR = ar ARFLAGS = cru libsyck_a_AR = $(AR) $(ARFLAGS) libsyck_a_LIBADD = am_libsyck_a_OBJECTS = emitter.$(OBJEXT) handler.$(OBJEXT) \ node.$(OBJEXT) syck.$(OBJEXT) syck_st.$(OBJEXT) gram.$(OBJEXT) \ bytecode.$(OBJEXT) yaml2byte.$(OBJEXT) token.$(OBJEXT) \ implicit.$(OBJEXT) libsyck_a_OBJECTS = $(am_libsyck_a_OBJECTS) DEFAULT_INCLUDES = -I. -I$(srcdir) -I$(top_builddir) depcomp = $(SHELL) $(top_srcdir)/config/depcomp am__depfiles_maybe = depfiles COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) CCLD = $(CC) LINK = $(CCLD) $(AM_CFLAGS) $(CFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ YACCCOMPILE = $(YACC) $(YFLAGS) $(AM_YFLAGS) SOURCES = $(libsyck_a_SOURCES) DIST_SOURCES = $(libsyck_a_SOURCES) includeHEADERS_INSTALL = $(INSTALL_HEADER) HEADERS = $(include_HEADERS) ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) ACLOCAL = @ACLOCAL@ AMDEP_FALSE = @AMDEP_FALSE@ AMDEP_TRUE = @AMDEP_TRUE@ AMTAR = @AMTAR@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CYGPATH_W = @CYGPATH_W@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ LDFLAGS = @LDFLAGS@ LEX = @LEX@ LEXLIB = @LEXLIB@ LEX_OUTPUT_ROOT = @LEX_OUTPUT_ROOT@ LIBOBJS = @LIBOBJS@ LIBS = @LIBS@ LN_S = @LN_S@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ OBJEXT = @OBJEXT@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ RANLIB = @RANLIB@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ STRIP = @STRIP@ VERSION = @VERSION@ YACC = @YACC@ ac_ct_CC = @ac_ct_CC@ ac_ct_RANLIB = @ac_ct_RANLIB@ ac_ct_STRIP = @ac_ct_STRIP@ am__fastdepCC_FALSE = @am__fastdepCC_FALSE@ am__fastdepCC_TRUE = @am__fastdepCC_TRUE@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ bindir = @bindir@ build_alias = @build_alias@ datadir = @datadir@ exec_prefix = @exec_prefix@ host_alias = @host_alias@ includedir = 
@includedir@ infodir = @infodir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ oldincludedir = @oldincludedir@ prefix = @prefix@ program_transform_name = @program_transform_name@ sbindir = @sbindir@ sharedstatedir = @sharedstatedir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ INCLUDES = -I$(top_srcdir) AM_YFLAGS = -d -t -v -p syck lib_LIBRARIES = libsyck.a include_HEADERS = syck.h syck_st.h libsyck_a_SOURCES = \ emitter.c \ handler.c \ node.c \ syck.c \ syck_st.c \ gram.y \ bytecode.re \ yaml2byte.c \ token.re \ implicit.re # libsyck_a_LIBADD = $(LEXLIB) REC = re2c all: all-am .SUFFIXES: .SUFFIXES: .c .o .obj .re .y $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh \ && exit 0; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign lib/Makefile'; \ cd $(top_srcdir) && \ $(AUTOMAKE) --foreign lib/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh install-libLIBRARIES: $(lib_LIBRARIES) @$(NORMAL_INSTALL) test -z "$(libdir)" || $(mkdir_p) "$(DESTDIR)$(libdir)" @list='$(lib_LIBRARIES)'; for p in $$list; do \ if test -f $$p; then \ f=$(am__strip_dir) \ echo " $(libLIBRARIES_INSTALL) '$$p' '$(DESTDIR)$(libdir)/$$f'"; \ $(libLIBRARIES_INSTALL) "$$p" "$(DESTDIR)$(libdir)/$$f"; \ else :; fi; \ done @$(POST_INSTALL) @list='$(lib_LIBRARIES)'; for p in $$list; do \ if test -f $$p; then \ p=$(am__strip_dir) \ echo " $(RANLIB) '$(DESTDIR)$(libdir)/$$p'"; \ $(RANLIB) "$(DESTDIR)$(libdir)/$$p"; \ else :; fi; \ done uninstall-libLIBRARIES: @$(NORMAL_UNINSTALL) @list='$(lib_LIBRARIES)'; for p in $$list; do \ p=$(am__strip_dir) \ echo " rm -f '$(DESTDIR)$(libdir)/$$p'"; \ rm -f "$(DESTDIR)$(libdir)/$$p"; \ done clean-libLIBRARIES: -test -z "$(lib_LIBRARIES)" || rm -f $(lib_LIBRARIES) gram.h: gram.c @if test ! 
-f $@; then \ rm -f gram.c; \ $(MAKE) gram.c; \ else :; fi libsyck.a: $(libsyck_a_OBJECTS) $(libsyck_a_DEPENDENCIES) -rm -f libsyck.a $(libsyck_a_AR) libsyck.a $(libsyck_a_OBJECTS) $(libsyck_a_LIBADD) $(RANLIB) libsyck.a mostlyclean-compile: -rm -f *.$(OBJEXT) distclean-compile: -rm -f *.tab.c @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/bytecode.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/emitter.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/gram.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/handler.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/implicit.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/node.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/syck.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/syck_st.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/token.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/yaml2byte.Po@am__quote@ .c.o: @am__fastdepCC_TRUE@ if $(COMPILE) -MT $@ -MD -MP -MF "$(DEPDIR)/$*.Tpo" -c -o $@ $<; \ @am__fastdepCC_TRUE@ then mv -f "$(DEPDIR)/$*.Tpo" "$(DEPDIR)/$*.Po"; else rm -f "$(DEPDIR)/$*.Tpo"; exit 1; fi @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(COMPILE) -c $< .c.obj: @am__fastdepCC_TRUE@ if $(COMPILE) -MT $@ -MD -MP -MF "$(DEPDIR)/$*.Tpo" -c -o $@ `$(CYGPATH_W) '$<'`; \ @am__fastdepCC_TRUE@ then mv -f "$(DEPDIR)/$*.Tpo" "$(DEPDIR)/$*.Po"; else rm -f "$(DEPDIR)/$*.Tpo"; exit 1; fi @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(COMPILE) -c `$(CYGPATH_W) '$<'` .y.c: $(YACCCOMPILE) $< if test -f y.tab.h; then \ to=`echo "$*_H" | sed \ -e 'y/abcdefghijklmnopqrstuvwxyz/ABCDEFGHIJKLMNOPQRSTUVWXYZ/' \ -e 's/[^ABCDEFGHIJKLMNOPQRSTUVWXYZ]/_/g'`; \ sed -e "/^#/!b" -e "s/Y_TAB_H/$$to/g" -e "s|y\.tab\.h|$*.h|" \ y.tab.h >$*.ht; \ rm -f y.tab.h; \ if cmp -s $*.ht $*.h; then \ rm -f $*.ht ;\ else \ mv $*.ht $*.h; \ fi; \ fi if test -f y.output; then \ mv y.output $*.output; \ fi sed '/^#/ s|y\.tab\.c|$@|' y.tab.c >$@t && mv $@t $@ rm -f y.tab.c uninstall-info-am: install-includeHEADERS: $(include_HEADERS) @$(NORMAL_INSTALL) test -z "$(includedir)" || $(mkdir_p) "$(DESTDIR)$(includedir)" @list='$(include_HEADERS)'; for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ f=$(am__strip_dir) \ echo " $(includeHEADERS_INSTALL) '$$d$$p' '$(DESTDIR)$(includedir)/$$f'"; \ $(includeHEADERS_INSTALL) "$$d$$p" "$(DESTDIR)$(includedir)/$$f"; \ done uninstall-includeHEADERS: @$(NORMAL_UNINSTALL) @list='$(include_HEADERS)'; for p in $$list; do \ f=$(am__strip_dir) \ echo " rm -f '$(DESTDIR)$(includedir)/$$f'"; \ rm -f "$(DESTDIR)$(includedir)/$$f"; \ done ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES) list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) ' { files[$$0] = 1; } \ END { for (i in files) print i; }'`; \ mkid -fID $$unique tags: TAGS TAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) tags=; \ here=`pwd`; \ list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) 
' { files[$$0] = 1; } \ END { for (i in files) print i; }'`; \ if test -z "$(ETAGS_ARGS)$$tags$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$tags $$unique; \ fi ctags: CTAGS CTAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) tags=; \ here=`pwd`; \ list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) ' { files[$$0] = 1; } \ END { for (i in files) print i; }'`; \ test -z "$(CTAGS_ARGS)$$tags$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$tags $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && cd $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) $$here distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's|.|.|g'`; \ list='$(DISTFILES)'; for file in $$list; do \ case $$file in \ $(srcdir)/*) file=`echo "$$file" | sed "s|^$$srcdirstrip/||"`;; \ $(top_srcdir)/*) file=`echo "$$file" | sed "s|^$$topsrcdirstrip/|$(top_builddir)/|"`;; \ esac; \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ dir=`echo "$$file" | sed -e 's,/[^/]*$$,,'`; \ if test "$$dir" != "$$file" && test "$$dir" != "."; then \ dir="/$$dir"; \ $(mkdir_p) "$(distdir)$$dir"; \ else \ dir=''; \ fi; \ if test -d $$d/$$file; then \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -pR $(srcdir)/$$file $(distdir)$$dir || exit 1; \ fi; \ cp -pR $$d/$$file $(distdir)$$dir || exit 1; \ else \ test -f $(distdir)/$$file \ || cp -p $$d/$$file $(distdir)/$$file \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(LIBRARIES) $(HEADERS) installdirs: for dir in "$(DESTDIR)$(libdir)" "$(DESTDIR)$(includedir)"; do \ test -z "$$dir" || $(mkdir_p) "$$dir"; \ done install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ `test -z '$(STRIP)' || \ echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
-rm -f gram.c -rm -f gram.h clean: clean-am clean-am: clean-generic clean-libLIBRARIES mostlyclean-am distclean: distclean-am -rm -rf ./$(DEPDIR) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-tags dvi: dvi-am dvi-am: html: html-am info: info-am info-am: install-data-am: install-includeHEADERS install-exec-am: install-libLIBRARIES install-info: install-info-am install-man: installcheck-am: maintainer-clean: maintainer-clean-am -rm -rf ./$(DEPDIR) -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-compile mostlyclean-generic pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-includeHEADERS uninstall-info-am \ uninstall-libLIBRARIES .PHONY: CTAGS GTAGS all all-am check check-am clean clean-generic \ clean-libLIBRARIES ctags distclean distclean-compile \ distclean-generic distclean-tags distdir dvi dvi-am html \ html-am info info-am install install-am install-data \ install-data-am install-exec install-exec-am \ install-includeHEADERS install-info install-info-am \ install-libLIBRARIES install-man install-strip installcheck \ installcheck-am installdirs maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-compile \ mostlyclean-generic pdf pdf-am ps ps-am tags uninstall \ uninstall-am uninstall-includeHEADERS uninstall-info-am \ uninstall-libLIBRARIES .re.c: $(REC) $< > $@.new && mv $@.new $@ # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/lib/Makefile0000644000000000000000000003502711672453175022277 0ustar rootroot# Makefile.in generated by automake 1.9.5 from Makefile.am. # lib/Makefile. Generated from Makefile.in by configure. # Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, # 2003, 2004, 2005 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. SOURCES = $(libsyck_a_SOURCES) srcdir = . top_srcdir = .. pkgdatadir = $(datadir)/syck pkglibdir = $(libdir)/syck pkgincludedir = $(includedir)/syck top_builddir = .. 
am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd INSTALL = /usr/bin/install -c install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : subdir = lib DIST_COMMON = $(include_HEADERS) $(srcdir)/Makefile.am \ $(srcdir)/Makefile.in gram.c gram.h ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/configure.in am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = `echo $$p | sed -e 's|^.*/||'`; am__installdirs = "$(DESTDIR)$(libdir)" "$(DESTDIR)$(includedir)" libLIBRARIES_INSTALL = $(INSTALL_DATA) LIBRARIES = $(lib_LIBRARIES) AR = ar ARFLAGS = cru libsyck_a_AR = $(AR) $(ARFLAGS) libsyck_a_LIBADD = am_libsyck_a_OBJECTS = emitter.$(OBJEXT) handler.$(OBJEXT) \ node.$(OBJEXT) syck.$(OBJEXT) syck_st.$(OBJEXT) gram.$(OBJEXT) \ bytecode.$(OBJEXT) yaml2byte.$(OBJEXT) token.$(OBJEXT) \ implicit.$(OBJEXT) libsyck_a_OBJECTS = $(am_libsyck_a_OBJECTS) DEFAULT_INCLUDES = -I. -I$(srcdir) -I$(top_builddir) depcomp = $(SHELL) $(top_srcdir)/config/depcomp am__depfiles_maybe = depfiles COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) CCLD = $(CC) LINK = $(CCLD) $(AM_CFLAGS) $(CFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ YACCCOMPILE = $(YACC) $(YFLAGS) $(AM_YFLAGS) SOURCES = $(libsyck_a_SOURCES) DIST_SOURCES = $(libsyck_a_SOURCES) includeHEADERS_INSTALL = $(INSTALL_HEADER) HEADERS = $(include_HEADERS) ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) ACLOCAL = ${SHELL} /home/why/sand/syck-0.55/config/missing --run aclocal-1.9 AMDEP_FALSE = # AMDEP_TRUE = AMTAR = ${SHELL} /home/why/sand/syck-0.55/config/missing --run tar AUTOCONF = ${SHELL} /home/why/sand/syck-0.55/config/missing --run autoconf AUTOHEADER = ${SHELL} /home/why/sand/syck-0.55/config/missing --run autoheader AUTOMAKE = ${SHELL} /home/why/sand/syck-0.55/config/missing --run automake-1.9 AWK = gawk CC = gcc CCDEPMODE = depmode=gcc3 CFLAGS = -g -O2 CPP = gcc -E CPPFLAGS = CYGPATH_W = echo DEFS = -DHAVE_CONFIG_H DEPDIR = .deps ECHO_C = ECHO_N = -n ECHO_T = EGREP = grep -E EXEEXT = INSTALL_DATA = ${INSTALL} -m 644 INSTALL_PROGRAM = ${INSTALL} INSTALL_SCRIPT = ${INSTALL} INSTALL_STRIP_PROGRAM = ${SHELL} $(install_sh) -c -s LDFLAGS = LEX = flex LEXLIB = -lfl LEX_OUTPUT_ROOT = lex.yy LIBOBJS = LIBS = LN_S = ln -s LTLIBOBJS = MAKEINFO = ${SHELL} /home/why/sand/syck-0.55/config/missing --run makeinfo OBJEXT = o PACKAGE = syck PACKAGE_BUGREPORT = PACKAGE_NAME = syck PACKAGE_STRING = syck 0.54 PACKAGE_TARNAME = syck PACKAGE_VERSION = 0.54 PATH_SEPARATOR = : RANLIB = ranlib SET_MAKE = SHELL = /bin/sh STRIP = VERSION = 0.54 YACC = bison -y ac_ct_CC = gcc ac_ct_RANLIB = ranlib ac_ct_STRIP = am__fastdepCC_FALSE = # am__fastdepCC_TRUE = am__include = include am__leading_dot = . 
am__quote = am__tar = ${AMTAR} chof - "$$tardir" am__untar = ${AMTAR} xf - bindir = ${exec_prefix}/bin build_alias = datadir = ${prefix}/share exec_prefix = ${prefix} host_alias = includedir = ${prefix}/include infodir = ${prefix}/info install_sh = /home/why/sand/syck-0.55/config/install-sh libdir = ${exec_prefix}/lib libexecdir = ${exec_prefix}/libexec localstatedir = ${prefix}/var mandir = ${prefix}/man mkdir_p = mkdir -p -- oldincludedir = /usr/include prefix = /usr/local program_transform_name = s,x,x, sbindir = ${exec_prefix}/sbin sharedstatedir = ${prefix}/com sysconfdir = ${prefix}/etc target_alias = INCLUDES = -I$(top_srcdir) AM_YFLAGS = -d -t -v -p syck lib_LIBRARIES = libsyck.a include_HEADERS = syck.h syck_st.h libsyck_a_SOURCES = \ emitter.c \ handler.c \ node.c \ syck.c \ syck_st.c \ gram.y \ bytecode.re \ yaml2byte.c \ token.re \ implicit.re # libsyck_a_LIBADD = $(LEXLIB) REC = re2c all: all-am .SUFFIXES: .SUFFIXES: .c .o .obj .re .y $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh \ && exit 0; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign lib/Makefile'; \ cd $(top_srcdir) && \ $(AUTOMAKE) --foreign lib/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh install-libLIBRARIES: $(lib_LIBRARIES) @$(NORMAL_INSTALL) test -z "$(libdir)" || $(mkdir_p) "$(DESTDIR)$(libdir)" @list='$(lib_LIBRARIES)'; for p in $$list; do \ if test -f $$p; then \ f=$(am__strip_dir) \ echo " $(libLIBRARIES_INSTALL) '$$p' '$(DESTDIR)$(libdir)/$$f'"; \ $(libLIBRARIES_INSTALL) "$$p" "$(DESTDIR)$(libdir)/$$f"; \ else :; fi; \ done @$(POST_INSTALL) @list='$(lib_LIBRARIES)'; for p in $$list; do \ if test -f $$p; then \ p=$(am__strip_dir) \ echo " $(RANLIB) '$(DESTDIR)$(libdir)/$$p'"; \ $(RANLIB) "$(DESTDIR)$(libdir)/$$p"; \ else :; fi; \ done uninstall-libLIBRARIES: @$(NORMAL_UNINSTALL) @list='$(lib_LIBRARIES)'; for p in $$list; do \ p=$(am__strip_dir) \ echo " rm -f '$(DESTDIR)$(libdir)/$$p'"; \ rm -f "$(DESTDIR)$(libdir)/$$p"; \ done clean-libLIBRARIES: -test -z "$(lib_LIBRARIES)" || rm -f $(lib_LIBRARIES) gram.h: gram.c @if test ! 
-f $@; then \ rm -f gram.c; \ $(MAKE) gram.c; \ else :; fi libsyck.a: $(libsyck_a_OBJECTS) $(libsyck_a_DEPENDENCIES) -rm -f libsyck.a $(libsyck_a_AR) libsyck.a $(libsyck_a_OBJECTS) $(libsyck_a_LIBADD) $(RANLIB) libsyck.a mostlyclean-compile: -rm -f *.$(OBJEXT) distclean-compile: -rm -f *.tab.c include ./$(DEPDIR)/bytecode.Po include ./$(DEPDIR)/emitter.Po include ./$(DEPDIR)/gram.Po include ./$(DEPDIR)/handler.Po include ./$(DEPDIR)/implicit.Po include ./$(DEPDIR)/node.Po include ./$(DEPDIR)/syck.Po include ./$(DEPDIR)/syck_st.Po include ./$(DEPDIR)/token.Po include ./$(DEPDIR)/yaml2byte.Po .c.o: if $(COMPILE) -MT $@ -MD -MP -MF "$(DEPDIR)/$*.Tpo" -c -o $@ $<; \ then mv -f "$(DEPDIR)/$*.Tpo" "$(DEPDIR)/$*.Po"; else rm -f "$(DEPDIR)/$*.Tpo"; exit 1; fi # source='$<' object='$@' libtool=no \ # DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) \ # $(COMPILE) -c $< .c.obj: if $(COMPILE) -MT $@ -MD -MP -MF "$(DEPDIR)/$*.Tpo" -c -o $@ `$(CYGPATH_W) '$<'`; \ then mv -f "$(DEPDIR)/$*.Tpo" "$(DEPDIR)/$*.Po"; else rm -f "$(DEPDIR)/$*.Tpo"; exit 1; fi # source='$<' object='$@' libtool=no \ # DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) \ # $(COMPILE) -c `$(CYGPATH_W) '$<'` .y.c: $(YACCCOMPILE) $< if test -f y.tab.h; then \ to=`echo "$*_H" | sed \ -e 'y/abcdefghijklmnopqrstuvwxyz/ABCDEFGHIJKLMNOPQRSTUVWXYZ/' \ -e 's/[^ABCDEFGHIJKLMNOPQRSTUVWXYZ]/_/g'`; \ sed -e "/^#/!b" -e "s/Y_TAB_H/$$to/g" -e "s|y\.tab\.h|$*.h|" \ y.tab.h >$*.ht; \ rm -f y.tab.h; \ if cmp -s $*.ht $*.h; then \ rm -f $*.ht ;\ else \ mv $*.ht $*.h; \ fi; \ fi if test -f y.output; then \ mv y.output $*.output; \ fi sed '/^#/ s|y\.tab\.c|$@|' y.tab.c >$@t && mv $@t $@ rm -f y.tab.c uninstall-info-am: install-includeHEADERS: $(include_HEADERS) @$(NORMAL_INSTALL) test -z "$(includedir)" || $(mkdir_p) "$(DESTDIR)$(includedir)" @list='$(include_HEADERS)'; for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ f=$(am__strip_dir) \ echo " $(includeHEADERS_INSTALL) '$$d$$p' '$(DESTDIR)$(includedir)/$$f'"; \ $(includeHEADERS_INSTALL) "$$d$$p" "$(DESTDIR)$(includedir)/$$f"; \ done uninstall-includeHEADERS: @$(NORMAL_UNINSTALL) @list='$(include_HEADERS)'; for p in $$list; do \ f=$(am__strip_dir) \ echo " rm -f '$(DESTDIR)$(includedir)/$$f'"; \ rm -f "$(DESTDIR)$(includedir)/$$f"; \ done ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES) list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) ' { files[$$0] = 1; } \ END { for (i in files) print i; }'`; \ mkid -fID $$unique tags: TAGS TAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) tags=; \ here=`pwd`; \ list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) ' { files[$$0] = 1; } \ END { for (i in files) print i; }'`; \ if test -z "$(ETAGS_ARGS)$$tags$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$tags $$unique; \ fi ctags: CTAGS CTAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) tags=; \ here=`pwd`; \ list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) ' { files[$$0] = 1; } \ END { for (i in files) print i; }'`; \ test -z "$(CTAGS_ARGS)$$tags$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$tags $$unique GTAGS: 
here=`$(am__cd) $(top_builddir) && pwd` \ && cd $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) $$here distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's|.|.|g'`; \ list='$(DISTFILES)'; for file in $$list; do \ case $$file in \ $(srcdir)/*) file=`echo "$$file" | sed "s|^$$srcdirstrip/||"`;; \ $(top_srcdir)/*) file=`echo "$$file" | sed "s|^$$topsrcdirstrip/|$(top_builddir)/|"`;; \ esac; \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ dir=`echo "$$file" | sed -e 's,/[^/]*$$,,'`; \ if test "$$dir" != "$$file" && test "$$dir" != "."; then \ dir="/$$dir"; \ $(mkdir_p) "$(distdir)$$dir"; \ else \ dir=''; \ fi; \ if test -d $$d/$$file; then \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -pR $(srcdir)/$$file $(distdir)$$dir || exit 1; \ fi; \ cp -pR $$d/$$file $(distdir)$$dir || exit 1; \ else \ test -f $(distdir)/$$file \ || cp -p $$d/$$file $(distdir)/$$file \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(LIBRARIES) $(HEADERS) installdirs: for dir in "$(DESTDIR)$(libdir)" "$(DESTDIR)$(includedir)"; do \ test -z "$$dir" || $(mkdir_p) "$$dir"; \ done install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ `test -z '$(STRIP)' || \ echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." -rm -f gram.c -rm -f gram.h clean: clean-am clean-am: clean-generic clean-libLIBRARIES mostlyclean-am distclean: distclean-am -rm -rf ./$(DEPDIR) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-tags dvi: dvi-am dvi-am: html: html-am info: info-am info-am: install-data-am: install-includeHEADERS install-exec-am: install-libLIBRARIES install-info: install-info-am install-man: installcheck-am: maintainer-clean: maintainer-clean-am -rm -rf ./$(DEPDIR) -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-compile mostlyclean-generic pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-includeHEADERS uninstall-info-am \ uninstall-libLIBRARIES .PHONY: CTAGS GTAGS all all-am check check-am clean clean-generic \ clean-libLIBRARIES ctags distclean distclean-compile \ distclean-generic distclean-tags distdir dvi dvi-am html \ html-am info info-am install install-am install-data \ install-data-am install-exec install-exec-am \ install-includeHEADERS install-info install-info-am \ install-libLIBRARIES install-man install-strip installcheck \ installcheck-am installdirs maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-compile \ mostlyclean-generic pdf pdf-am ps ps-am tags uninstall \ uninstall-am uninstall-includeHEADERS uninstall-info-am \ uninstall-libLIBRARIES .re.c: $(REC) $< > $@.new && mv $@.new $@ # Tell versions [3.59,3.63) of GNU make to not export all variables. 
# Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/lib/emitter.c0000644000000000000000000010315011672453175022445 0ustar rootroot/* * emitter.c * * $Author: why $ * $Date: 2005/05/19 06:07:42 $ * * Copyright (C) 2003 why the lucky stiff * * All Base64 code from Ruby's pack.c. * Ruby is Copyright (C) 1993-2003 Yukihiro Matsumoto */ #include #include #include "syck.h" #define DEFAULT_ANCHOR_FORMAT "id%03d" const char hex_table[] = "0123456789ABCDEF"; static char b64_table[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"; /* * Built-in base64 (from Ruby's pack.c) */ char * syck_base64enc( char *s, long len ) { long i = 0; int padding = '='; char *buff = S_ALLOC_N(char, len * 4 / 3 + 6); while (len >= 3) { buff[i++] = b64_table[077 & (*s >> 2)]; buff[i++] = b64_table[077 & (((*s << 4) & 060) | ((s[1] >> 4) & 017))]; buff[i++] = b64_table[077 & (((s[1] << 2) & 074) | ((s[2] >> 6) & 03))]; buff[i++] = b64_table[077 & s[2]]; s += 3; len -= 3; } if (len == 2) { buff[i++] = b64_table[077 & (*s >> 2)]; buff[i++] = b64_table[077 & (((*s << 4) & 060) | ((s[1] >> 4) & 017))]; buff[i++] = b64_table[077 & (((s[1] << 2) & 074) | (('\0' >> 6) & 03))]; buff[i++] = padding; } else if (len == 1) { buff[i++] = b64_table[077 & (*s >> 2)]; buff[i++] = b64_table[077 & (((*s << 4) & 060) | (('\0' >> 4) & 017))]; buff[i++] = padding; buff[i++] = padding; } buff[i++] = '\n'; return buff; } char * syck_base64dec( char *s, long len ) { int a = -1,b = -1,c = 0,d; static int first = 1; static int b64_xtable[256]; char *ptr = syck_strndup( s, len ); char *end = ptr; char *send = s + len; if (first) { int i; first = 0; for (i = 0; i < 256; i++) { b64_xtable[i] = -1; } for (i = 0; i < 64; i++) { b64_xtable[(int)b64_table[i]] = i; } } while (s < send) { while (s[0] == '\r' || s[0] == '\n') { s++; } if ((a = b64_xtable[(int)s[0]]) == -1) break; if ((b = b64_xtable[(int)s[1]]) == -1) break; if ((c = b64_xtable[(int)s[2]]) == -1) break; if ((d = b64_xtable[(int)s[3]]) == -1) break; *end++ = a << 2 | b >> 4; *end++ = b << 4 | c >> 2; *end++ = c << 6 | d; s += 4; } if (a != -1 && b != -1) { if (s + 2 < send && s[2] == '=') *end++ = a << 2 | b >> 4; if (c != -1 && s + 3 < send && s[3] == '=') { *end++ = a << 2 | b >> 4; *end++ = b << 4 | c >> 2; } } *end = '\0'; /*RSTRING(buf)->len = ptr - RSTRING(buf)->ptr;*/ return ptr; } /* * Allocate an emitter */ SyckEmitter * syck_new_emitter() { SyckEmitter *e; e = S_ALLOC( SyckEmitter ); e->headless = 0; e->use_header = 0; e->use_version = 0; e->sort_keys = 0; e->anchor_format = NULL; e->explicit_typing = 0; e->best_width = 80; e->style = scalar_none; e->stage = doc_open; e->indent = 2; e->level = -1; e->anchors = NULL; e->markers = NULL; e->anchored = NULL; e->bufsize = SYCK_BUFFERSIZE; e->buffer = NULL; e->marker = NULL; e->bufpos = 0; e->emitter_handler = NULL; e->output_handler = NULL; e->lvl_idx = 0; e->lvl_capa = ALLOC_CT; e->levels = S_ALLOC_N( SyckLevel, e->lvl_capa ); syck_emitter_reset_levels( e ); e->bonus = NULL; return e; } int syck_st_free_anchors( char *key, char *name, char *arg ) { S_FREE( name ); return ST_CONTINUE; } void syck_emitter_st_free( SyckEmitter *e ) { /* * Free the anchor tables */ if ( e->anchors != NULL ) { st_foreach( e->anchors, syck_st_free_anchors, 0 ); st_free_table( e->anchors ); e->anchors = NULL; } if ( e->anchored != NULL ) { st_free_table( e->anchored ); e->anchored = NULL; } /* * Free the markers tables */ if ( e->markers != NULL ) { st_free_table( 
e->markers ); e->markers = NULL; } } SyckLevel * syck_emitter_current_level( SyckEmitter *e ) { return &e->levels[e->lvl_idx-1]; } SyckLevel * syck_emitter_parent_level( SyckEmitter *e ) { return &e->levels[e->lvl_idx-2]; } void syck_emitter_pop_level( SyckEmitter *e ) { ASSERT( e != NULL ); /* The root level should never be popped */ if ( e->lvl_idx <= 1 ) return; e->lvl_idx -= 1; free( e->levels[e->lvl_idx].domain ); } void syck_emitter_add_level( SyckEmitter *e, int len, enum syck_level_status status ) { ASSERT( e != NULL ); if ( e->lvl_idx + 1 > e->lvl_capa ) { e->lvl_capa += ALLOC_CT; S_REALLOC_N( e->levels, SyckLevel, e->lvl_capa ); } ASSERT( len > e->levels[e->lvl_idx-1].spaces ); e->levels[e->lvl_idx].spaces = len; e->levels[e->lvl_idx].ncount = 0; e->levels[e->lvl_idx].domain = syck_strndup( e->levels[e->lvl_idx-1].domain, strlen( e->levels[e->lvl_idx-1].domain ) ); e->levels[e->lvl_idx].status = status; e->levels[e->lvl_idx].anctag = 0; e->lvl_idx += 1; } void syck_emitter_reset_levels( SyckEmitter *e ) { while ( e->lvl_idx > 1 ) { syck_emitter_pop_level( e ); } if ( e->lvl_idx < 1 ) { e->lvl_idx = 1; e->levels[0].spaces = -1; e->levels[0].ncount = 0; e->levels[0].domain = syck_strndup( "", 0 ); e->levels[0].anctag = 0; } e->levels[0].status = syck_lvl_header; } void syck_emitter_handler( SyckEmitter *e, SyckEmitterHandler hdlr ) { e->emitter_handler = hdlr; } void syck_output_handler( SyckEmitter *e, SyckOutputHandler hdlr ) { e->output_handler = hdlr; } void syck_free_emitter( SyckEmitter *e ) { /* * Free tables */ syck_emitter_st_free( e ); syck_emitter_reset_levels( e ); S_FREE( e->levels[0].domain ); S_FREE( e->levels ); if ( e->buffer != NULL ) { S_FREE( e->buffer ); } S_FREE( e ); } void syck_emitter_clear( SyckEmitter *e ) { if ( e->buffer == NULL ) { e->buffer = S_ALLOC_N( char, e->bufsize ); S_MEMZERO( e->buffer, char, e->bufsize ); } e->buffer[0] = '\0'; e->marker = e->buffer; e->bufpos = 0; } /* * Raw write to the emitter buffer. */ void syck_emitter_write( SyckEmitter *e, char *str, long len ) { long at; ASSERT( str != NULL ) if ( e->buffer == NULL ) { syck_emitter_clear( e ); } /* * Flush if at end of buffer */ at = e->marker - e->buffer; if ( len + at >= e->bufsize ) { syck_emitter_flush( e, 0 ); for (;;) { long rest = e->bufsize - (e->marker - e->buffer); if (len <= rest) break; S_MEMCPY( e->marker, str, char, rest ); e->marker += rest; str += rest; len -= rest; syck_emitter_flush( e, 0 ); } } /* * Write to buffer */ S_MEMCPY( e->marker, str, char, len ); e->marker += len; e->marker[0] = '\0'; } /* * Write a chunk of data out. */ void syck_emitter_flush( SyckEmitter *e, long check_room ) { /* * Check for enough space in the buffer for check_room length. */ if ( check_room > 0 ) { if ( e->bufsize > ( e->marker - e->buffer ) + check_room ) { return; } } else { check_room = e->bufsize; } /* * Determine headers. */ if ( ( e->stage == doc_open && ( e->headless == 0 || e->use_header == 1 ) ) || e->stage == doc_need_header ) { if ( e->use_version == 1 ) { char *header = S_ALLOC_N( char, 64 ); S_MEMZERO( header, char, 64 ); sprintf( header, "--- %%YAML:%d.%d ", SYCK_YAML_MAJOR, SYCK_YAML_MINOR ); (e->output_handler)( e, header, strlen( header ) ); S_FREE( header ); } else { (e->output_handler)( e, "--- ", 4 ); } e->stage = doc_processing; } /* * Commit buffer. 
*/ if ( check_room > e->marker - e->buffer ) { check_room = e->marker - e->buffer; } (e->output_handler)( e, e->buffer, check_room ); e->bufpos += check_room; e->marker -= check_room; } /* * Start emitting from the given node, check for anchoring and then * issue the callback to the emitter handler. */ void syck_emit( SyckEmitter *e, st_data_t n ) { SYMID oid; char *anchor_name = NULL; int indent = 0, x = 0; SyckLevel *lvl = syck_emitter_current_level( e ); /* Add new level */ if ( lvl->spaces >= 0 ) { indent = lvl->spaces + e->indent; } syck_emitter_add_level( e, indent, syck_lvl_open ); lvl = syck_emitter_current_level( e ); /* Look for anchor */ if ( e->anchors != NULL && st_lookup( e->markers, n, (st_data_t *)&oid ) && st_lookup( e->anchors, (st_data_t)oid, (st_data_t *)&anchor_name ) ) { if ( e->anchored == NULL ) { e->anchored = st_init_numtable(); } if ( ! st_lookup( e->anchored, (st_data_t)anchor_name, (st_data_t *)&x ) ) { char *an = S_ALLOC_N( char, strlen( anchor_name ) + 3 ); sprintf( an, "&%s ", anchor_name ); syck_emitter_write( e, an, strlen( anchor_name ) + 2 ); free( an ); x = 1; st_insert( e->anchored, (st_data_t)anchor_name, (st_data_t)x ); lvl->anctag = 1; } else { char *an = S_ALLOC_N( char, strlen( anchor_name ) + 2 ); sprintf( an, "*%s", anchor_name ); syck_emitter_write( e, an, strlen( anchor_name ) + 1 ); free( an ); goto end_emit; } } (e->emitter_handler)( e, n ); /* Pop the level */ end_emit: syck_emitter_pop_level( e ); if ( e->lvl_idx == 1 ) { syck_emitter_write( e, "\n", 1 ); e->stage = doc_open; } } /* * Determine what tag needs to be written, based on the taguri of the node * and the implicit tag which would be assigned to this node. If a tag is * required, write the tag. */ void syck_emit_tag( SyckEmitter *e, char *tag, char *ignore ) { SyckLevel *lvl; if ( tag == NULL ) return; if ( ignore != NULL && syck_tagcmp( tag, ignore ) == 0 && e->explicit_typing == 0 ) return; lvl = syck_emitter_current_level( e ); /* implicit */ if ( strlen( tag ) == 0 ) { syck_emitter_write( e, "! ", 2 ); /* global types */ } else if ( strncmp( tag, "tag:", 4 ) == 0 ) { int taglen = strlen( tag ); syck_emitter_write( e, "!", 1 ); if ( strncmp( tag + 4, YAML_DOMAIN, strlen( YAML_DOMAIN ) ) == 0 ) { int skip = 4 + strlen( YAML_DOMAIN ) + 1; syck_emitter_write( e, tag + skip, taglen - skip ); } else { char *subd = tag + 4; while ( *subd != ':' && *subd != '\0' ) subd++; if ( *subd == ':' ) { if ( subd - tag > ( strlen( YAML_DOMAIN ) + 5 ) && strncmp( subd - strlen( YAML_DOMAIN ), YAML_DOMAIN, strlen( YAML_DOMAIN ) ) == 0 ) { syck_emitter_write( e, tag + 4, subd - strlen( YAML_DOMAIN ) - ( tag + 4 ) - 1 ); syck_emitter_write( e, "/", 1 ); syck_emitter_write( e, subd + 1, ( tag + taglen ) - ( subd + 1 ) ); } else { syck_emitter_write( e, tag + 4, subd - ( tag + 4 ) ); syck_emitter_write( e, "/", 1 ); syck_emitter_write( e, subd + 1, ( tag + taglen ) - ( subd + 1 ) ); } } else { /* TODO: Invalid tag (no colon after domain) */ return; } } syck_emitter_write( e, " ", 1 ); /* private types */ } else if ( strncmp( tag, "x-private:", 10 ) == 0 ) { syck_emitter_write( e, "!!", 2 ); syck_emitter_write( e, tag + 10, strlen( tag ) - 10 ); syck_emitter_write( e, " ", 1 ); } lvl->anctag = 1; } /* * Emit a newline and an appropriately spaced indent. 
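 *
 * (Illustrative note, not from the original source: at a level whose
 * spaces field is 4 this writes a newline followed by four blanks,
 * i.e. the five characters "\n    ".)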
*/ void syck_emit_indent( SyckEmitter *e ) { int i; SyckLevel *lvl = syck_emitter_current_level( e ); if ( lvl->spaces >= 0 ) { char *spcs = S_ALLOC_N( char, lvl->spaces + 2 ); spcs[0] = '\n'; spcs[lvl->spaces + 1] = '\0'; for ( i = 0; i < lvl->spaces; i++ ) spcs[i+1] = ' '; syck_emitter_write( e, spcs, lvl->spaces + 1 ); free( spcs ); } } /* Clear the scan */ #define SCAN_NONE 0 /* All printable characters? */ #define SCAN_NONPRINT 1 /* Any indented lines? */ #define SCAN_INDENTED 2 /* Larger than the requested width? */ #define SCAN_WIDE 4 /* Opens with whitespace? */ #define SCAN_WHITESTART 8 /* Contains a newline */ #define SCAN_NEWLINE 16 /* Contains a single quote */ #define SCAN_SINGLEQ 32 /* Contains a double quote */ #define SCAN_DOUBLEQ 64 /* Starts with a token */ #define SCAN_INDIC_S 128 /* Contains a flow indicator */ #define SCAN_INDIC_C 256 /* Ends without newlines */ #define SCAN_NONL_E 512 /* Ends with many newlines */ #define SCAN_MANYNL_E 1024 /* Contains flow map indicators */ #define SCAN_FLOWMAP 2048 /* Contains flow seq indicators */ #define SCAN_FLOWSEQ 4096 /* Contains a valid doc separator */ #define SCAN_DOCSEP 8192 /* * Basic printable test for LATIN-1 characters. */ int syck_scan_scalar( int req_width, char *cursor, long len ) { long i = 0, start = 0; int flags = SCAN_NONE; if ( len < 1 ) return flags; /* c-indicators from the spec */ if ( cursor[0] == '[' || cursor[0] == ']' || cursor[0] == '{' || cursor[0] == '}' || cursor[0] == '!' || cursor[0] == '*' || cursor[0] == '&' || cursor[0] == '|' || cursor[0] == '>' || cursor[0] == '\'' || cursor[0] == '"' || cursor[0] == '#' || cursor[0] == '%' || cursor[0] == '@' || cursor[0] == '&' ) { flags |= SCAN_INDIC_S; } if ( ( cursor[0] == '-' || cursor[0] == ':' || cursor[0] == '?' || cursor[0] == ',' ) && cursor[1] == ' ' ) { flags |= SCAN_INDIC_S; } /* ending newlines */ if ( cursor[len-1] != '\n' ) { flags |= SCAN_NONL_E; } else if ( len > 1 && cursor[len-2] == '\n' ) { flags |= SCAN_MANYNL_E; } /* opening doc sep */ if ( len >= 3 && strncmp( cursor, "---", 3 ) == 0 ) flags |= SCAN_DOCSEP; /* scan string */ for ( i = 0; i < len; i++ ) { if ( ! ( cursor[i] == 0x9 || cursor[i] == 0xA || cursor[i] == 0xD || ( cursor[i] >= 0x20 && cursor[i] <= 0x7E ) ) ) { flags |= SCAN_NONPRINT; } else if ( cursor[i] == '\n' ) { flags |= SCAN_NEWLINE; if ( len - i >= 3 && strncmp( &cursor[i+1], "---", 3 ) == 0 ) flags |= SCAN_DOCSEP; if ( cursor[i+1] == ' ' || cursor[i+1] == '\t' ) flags |= SCAN_INDENTED; if ( req_width > 0 && i - start > req_width ) flags |= SCAN_WIDE; start = i; } else if ( cursor[i] == '\'' ) { flags |= SCAN_SINGLEQ; } else if ( cursor[i] == '"' ) { flags |= SCAN_DOUBLEQ; } else if ( cursor[i] == ']' ) { flags |= SCAN_FLOWSEQ; } else if ( cursor[i] == '}' ) { flags |= SCAN_FLOWMAP; } /* remember, if plain collections get implemented, to add nb-plain-flow-char */ else if ( ( cursor[i] == ' ' && cursor[i+1] == '#' ) || ( cursor[i] == ':' && cursor[i+1] == ' ' ) ) { flags |= SCAN_INDIC_C; } else if ( cursor[i] == ',' && cursor[i+1] == ' ' ) { flags |= SCAN_FLOWMAP; flags |= SCAN_FLOWSEQ; } if ( i == 0 && ( cursor[i] == ' ' || cursor[i] == '\t' ) ) { flags |= SCAN_WHITESTART; } } /* printf( "---STR---\n%s\nFLAGS: %d\n", cursor, flags ); */ return flags; } /* * All scalars should be emitted through this function, which determines an appropriate style, * tag and indent. 
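 *
 * (Rough illustration only, not from the original source: a handler
 * emitting an ordinary string node with all defaults might call
 *
 *   syck_emit_scalar( e, "tag:yaml.org,2002:str", scalar_none,
 *                     0, 0, 0, str, strlen( str ) );
 *
 * leaving force_indent, force_width and keep_nl at zero so the scan
 * below chooses the style, width and newline handling.)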
*/ void syck_emit_scalar( SyckEmitter *e, char *tag, enum scalar_style force_style, int force_indent, int force_width, char keep_nl, char *str, long len ) { enum scalar_style favor_style = scalar_literal; SyckLevel *parent = syck_emitter_parent_level( e ); SyckLevel *lvl = syck_emitter_current_level( e ); int scan; char *implicit; if ( str == NULL ) str = ""; /* No empty nulls as map keys */ if ( len == 0 && ( parent->status == syck_lvl_map || parent->status == syck_lvl_imap ) && parent->ncount % 2 == 1 && syck_tagcmp( tag, "tag:yaml.org,2002:null" ) == 0 ) { str = "~"; len = 1; } scan = syck_scan_scalar( force_width, str, len ); implicit = syck_match_implicit( str, len ); /* quote strings which default to implicits */ implicit = syck_taguri( YAML_DOMAIN, implicit, strlen( implicit ) ); if ( syck_tagcmp( tag, implicit ) != 0 && syck_tagcmp( tag, "tag:yaml.org,2002:str" ) == 0 ) { force_style = scalar_2quote; } else { syck_emit_tag( e, tag, implicit ); } S_FREE( implicit ); /* if still arbitrary, sniff a good block style. */ if ( force_style == scalar_none ) { if ( scan & SCAN_NEWLINE ) { force_style = scalar_literal; } else { force_style = scalar_plain; } } if ( e->style == scalar_fold ) { favor_style = scalar_fold; } /* Determine block style */ if ( scan & SCAN_NONPRINT ) { force_style = scalar_2quote; } else if ( scan & SCAN_WHITESTART ) { force_style = scalar_2quote; } else if ( force_style != scalar_fold && ( scan & SCAN_INDENTED ) ) { force_style = scalar_literal; } else if ( force_style == scalar_plain && ( scan & SCAN_NEWLINE ) ) { force_style = favor_style; } else if ( force_style == scalar_plain && parent->status == syck_lvl_iseq && ( scan & SCAN_FLOWSEQ ) ) { force_style = scalar_2quote; } else if ( force_style == scalar_plain && parent->status == syck_lvl_imap && ( scan & SCAN_FLOWMAP ) ) { force_style = scalar_2quote; /* } else if ( force_style == scalar_fold && ( ! 
( scan & SCAN_WIDE ) ) ) { force_style = scalar_literal; */ } else if ( force_style == scalar_plain && ( scan & SCAN_INDIC_S || scan & SCAN_INDIC_C ) ) { if ( scan & SCAN_NEWLINE ) { force_style = favor_style; } else { force_style = scalar_2quote; } } if ( force_indent > 0 ) { lvl->spaces = parent->spaces + force_indent; } else if ( scan & SCAN_DOCSEP ) { lvl->spaces = parent->spaces + e->indent; } /* For now, all ambiguous keys are going to be double-quoted */ if ( parent->status == syck_lvl_map && parent->ncount % 2 == 1 ) { if ( force_style != scalar_plain ) { force_style = scalar_2quote; } } /* If the parent is an inline, double quote anything complex */ if ( parent->status == syck_lvl_imap || parent->status == syck_lvl_iseq ) { if ( force_style != scalar_plain && force_style != scalar_1quote ) { force_style = scalar_2quote; } } /* Fix the ending newlines */ if ( scan & SCAN_NONL_E ) { keep_nl = NL_CHOMP; } else if ( scan & SCAN_MANYNL_E ) { keep_nl = NL_KEEP; } /* Write the text node */ switch ( force_style ) { case scalar_1quote: syck_emit_1quoted( e, force_width, str, len ); break; case scalar_2quote: syck_emit_2quoted( e, force_width, str, len ); break; case scalar_fold: syck_emit_folded( e, force_width, keep_nl, str, len ); break; case scalar_literal: syck_emit_literal( e, keep_nl, str, len ); break; case scalar_plain: syck_emitter_write( e, str, len ); break; } } void syck_emitter_escape( SyckEmitter *e, char *src, long len ) { int i; for( i = 0; i < len; i++ ) { if( (src[i] < 0x20) || (0x7E < src[i]) ) { syck_emitter_write( e, "\\", 1 ); if( '\0' == src[i] ) syck_emitter_write( e, "0", 1 ); else { syck_emitter_write( e, "x", 1 ); syck_emitter_write( e, (char *)hex_table + ((src[i] & 0xF0) >> 4), 1 ); syck_emitter_write( e, (char *)hex_table + (src[i] & 0x0F), 1 ); } } else { syck_emitter_write( e, src + i, 1 ); if( '\\' == src[i] ) syck_emitter_write( e, "\\", 1 ); } } } /* * Outputs a single-quoted block. */ void syck_emit_1quoted( SyckEmitter *e, int width, char *str, long len ) { char do_indent = 0; char *mark = str; char *start = str; char *end = str; syck_emitter_write( e, "'", 1 ); while ( mark < str + len ) { if ( do_indent ) { syck_emit_indent( e ); do_indent = 0; } switch ( *mark ) { case '\'': syck_emitter_write( e, "'", 1 ); break; case '\n': end = mark + 1; if ( *start != ' ' && *start != '\n' && *end != '\n' && *end != ' ' ) { syck_emitter_write( e, "\n\n", 2 ); } else { syck_emitter_write( e, "\n", 1 ); } do_indent = 1; start = mark + 1; break; case ' ': if ( width > 0 && *start != ' ' && mark - end > width ) { do_indent = 1; end = mark + 1; } else { syck_emitter_write( e, " ", 1 ); } break; default: syck_emitter_write( e, mark, 1 ); break; } mark++; } syck_emitter_write( e, "'", 1 ); } /* * Outputs a double-quoted block. */ void syck_emit_2quoted( SyckEmitter *e, int width, char *str, long len ) { char do_indent = 0; char *mark = str; char *start = str; char *end = str; syck_emitter_write( e, "\"", 1 ); while ( mark < str + len ) { if ( do_indent > 0 ) { if ( do_indent == 2 ) { syck_emitter_write( e, "\\", 1 ); } syck_emit_indent( e ); do_indent = 0; } switch ( *mark ) { /* Escape sequences allowed within double quotes. 
*/ case '"': syck_emitter_write( e, "\\\"", 2 ); break; case '\\': syck_emitter_write( e, "\\\\", 2 ); break; case '\0': syck_emitter_write( e, "\\0", 2 ); break; case '\a': syck_emitter_write( e, "\\a", 2 ); break; case '\b': syck_emitter_write( e, "\\b", 2 ); break; case '\f': syck_emitter_write( e, "\\f", 2 ); break; case '\r': syck_emitter_write( e, "\\r", 2 ); break; case '\t': syck_emitter_write( e, "\\t", 2 ); break; case '\v': syck_emitter_write( e, "\\v", 2 ); break; case 0x1b: syck_emitter_write( e, "\\e", 2 ); break; case '\n': end = mark + 1; syck_emitter_write( e, "\\n", 2 ); do_indent = 2; start = mark + 1; if ( start < str + len && ( *start == ' ' || *start == '\n' ) ) { do_indent = 0; } break; case ' ': if ( width > 0 && *start != ' ' && mark - end > width ) { do_indent = 1; end = mark + 1; } else { syck_emitter_write( e, " ", 1 ); } break; default: syck_emitter_escape( e, mark, 1 ); break; } mark++; } syck_emitter_write( e, "\"", 1 ); } /* * Outputs a literal block. */ void syck_emit_literal( SyckEmitter *e, char keep_nl, char *str, long len ) { char *mark = str; char *start = str; char *end = str; syck_emitter_write( e, "|", 1 ); if ( keep_nl == NL_CHOMP ) { syck_emitter_write( e, "-", 1 ); } else if ( keep_nl == NL_KEEP ) { syck_emitter_write( e, "+", 1 ); } syck_emit_indent( e ); while ( mark < str + len ) { if ( *mark == '\n' ) { end = mark; if ( *start != ' ' && *start != '\n' && *end != '\n' && *end != ' ' ) end += 1; syck_emitter_write( e, start, end - start ); if ( mark + 1 == str + len ) { if ( keep_nl != NL_KEEP ) syck_emitter_write( e, "\n", 1 ); } else { syck_emit_indent( e ); } start = mark + 1; } mark++; } end = str + len; if ( start < end ) { syck_emitter_write( e, start, end - start ); } } /* * Outputs a folded block. */ void syck_emit_folded( SyckEmitter *e, int width, char keep_nl, char *str, long len ) { char *mark = str; char *start = str; char *end = str; syck_emitter_write( e, ">", 1 ); if ( keep_nl == NL_CHOMP ) { syck_emitter_write( e, "-", 1 ); } else if ( keep_nl == NL_KEEP ) { syck_emitter_write( e, "+", 1 ); } syck_emit_indent( e ); if ( width <= 0 ) width = e->best_width; while ( mark < str + len ) { switch ( *mark ) { case '\n': syck_emitter_write( e, end, mark - end ); end = mark + 1; if ( *start != ' ' && *start != '\n' && *end != '\n' && *end != ' ' ) { syck_emitter_write( e, "\n", 1 ); } if ( mark + 1 == str + len ) { if ( keep_nl != NL_KEEP ) syck_emitter_write( e, "\n", 1 ); } else { syck_emit_indent( e ); } start = mark + 1; break; case ' ': if ( *start != ' ' ) { if ( mark - end > width ) { syck_emitter_write( e, end, mark - end ); syck_emit_indent( e ); end = mark + 1; } } break; } mark++; } if ( end < mark ) { syck_emitter_write( e, end, mark - end ); } } /* * Begins emission of a sequence. */ void syck_emit_seq( SyckEmitter *e, char *tag, enum seq_style style ) { SyckLevel *parent = syck_emitter_parent_level( e ); SyckLevel *lvl = syck_emitter_current_level( e ); syck_emit_tag( e, tag, "tag:yaml.org,2002:seq" ); if ( style == seq_inline || ( parent->status == syck_lvl_imap || parent->status == syck_lvl_iseq ) ) { syck_emitter_write( e, "[", 1 ); lvl->status = syck_lvl_iseq; } else { lvl->status = syck_lvl_seq; } } /* * Begins emission of a mapping. 
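 *
 * (Illustrative sequence only, not from the original source: a caller
 * emitting a one-pair block mapping would typically issue, in order,
 *
 *   syck_emit_map( e, "tag:yaml.org,2002:map", map_none );
 *   syck_emit_item( e, key_node );
 *   syck_emit_item( e, value_node );
 *   syck_emit_end( e );
 *
 * where key_node and value_node stand for whatever st_data_t handles
 * the registered emitter handler understands; each syck_emit_item()
 * bumps the level's ncount and recurses through syck_emit().)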
*/ void syck_emit_map( SyckEmitter *e, char *tag, enum map_style style ) { SyckLevel *parent = syck_emitter_parent_level( e ); SyckLevel *lvl = syck_emitter_current_level( e ); syck_emit_tag( e, tag, "tag:yaml.org,2002:map" ); if ( style == map_inline || ( parent->status == syck_lvl_imap || parent->status == syck_lvl_iseq ) ) { syck_emitter_write( e, "{", 1 ); lvl->status = syck_lvl_imap; } else { lvl->status = syck_lvl_map; } } /* * Handles emitting of a collection item (for both * sequences and maps) */ void syck_emit_item( SyckEmitter *e, st_data_t n ) { SyckLevel *lvl = syck_emitter_current_level( e ); switch ( lvl->status ) { case syck_lvl_seq: { SyckLevel *parent = syck_emitter_parent_level( e ); /* seq-in-map shortcut */ if ( parent->status == syck_lvl_map && lvl->ncount == 0 ) { /* complex key */ if ( parent->ncount % 2 == 1 ) { syck_emitter_write( e, "?", 1 ); parent->status = syck_lvl_mapx; /* shortcut -- the lvl->anctag check should be unneccesary but * there is a nasty shift/reduce in the parser on this point and * i'm not ready to tickle it. */ } else if ( lvl->anctag == 0 ) { lvl->spaces = parent->spaces; } } /* seq-in-seq shortcut */ else if ( lvl->anctag == 0 && parent->status == syck_lvl_seq && lvl->ncount == 0 ) { int spcs = ( lvl->spaces - parent->spaces ) - 2; if ( spcs >= 0 ) { int i = 0; for ( i = 0; i < spcs; i++ ) { syck_emitter_write( e, " ", 1 ); } syck_emitter_write( e, "- ", 2 ); break; } } syck_emit_indent( e ); syck_emitter_write( e, "- ", 2 ); } break; case syck_lvl_iseq: { if ( lvl->ncount > 0 ) { syck_emitter_write( e, ", ", 2 ); } } break; case syck_lvl_map: { SyckLevel *parent = syck_emitter_parent_level( e ); /* map-in-map */ if ( parent->status == syck_lvl_map && lvl->ncount == 0 ) { /* complex key */ if ( parent->ncount % 2 == 1 ) { syck_emitter_write( e, "?", 1 ); parent->status = syck_lvl_mapx; } } /* map-in-seq shortcut */ if ( lvl->anctag == 0 && parent->status == syck_lvl_seq && lvl->ncount == 0 ) { int spcs = ( lvl->spaces - parent->spaces ) - 2; if ( spcs >= 0 ) { int i = 0; for ( i = 0; i < spcs; i++ ) { syck_emitter_write( e, " ", 1 ); } break; } } if ( lvl->ncount % 2 == 0 ) { syck_emit_indent( e ); } else { syck_emitter_write( e, ": ", 2 ); } } break; case syck_lvl_mapx: { if ( lvl->ncount % 2 == 0 ) { syck_emit_indent( e ); lvl->status = syck_lvl_map; } else { int i; if ( lvl->spaces > 0 ) { char *spcs = S_ALLOC_N( char, lvl->spaces + 1 ); spcs[lvl->spaces] = '\0'; for ( i = 0; i < lvl->spaces; i++ ) spcs[i] = ' '; syck_emitter_write( e, spcs, lvl->spaces ); S_FREE( spcs ); } syck_emitter_write( e, ": ", 2 ); } } break; case syck_lvl_imap: { if ( lvl->ncount > 0 ) { if ( lvl->ncount % 2 == 0 ) { syck_emitter_write( e, ", ", 2 ); } else { syck_emitter_write( e, ": ", 2 ); } } } break; } lvl->ncount++; syck_emit( e, n ); } /* * Closes emission of a collection. 
*/ void syck_emit_end( SyckEmitter *e ) { SyckLevel *lvl = syck_emitter_current_level( e ); SyckLevel *parent = syck_emitter_parent_level( e ); switch ( lvl->status ) { case syck_lvl_seq: if ( lvl->ncount == 0 ) { syck_emitter_write( e, "[]\n", 3 ); } else if ( parent->status == syck_lvl_mapx ) { syck_emitter_write( e, "\n", 1 ); } break; case syck_lvl_iseq: syck_emitter_write( e, "]\n", 1 ); break; case syck_lvl_map: if ( lvl->ncount == 0 ) { syck_emitter_write( e, "{}\n", 3 ); } else if ( lvl->ncount % 2 == 1 ) { syck_emitter_write( e, ":\n", 1 ); } else if ( parent->status == syck_lvl_mapx ) { syck_emitter_write( e, "\n", 1 ); } break; case syck_lvl_imap: syck_emitter_write( e, "}\n", 1 ); break; } } /* * Fill markers table with emitter nodes in the * soon-to-be-emitted tree. */ SYMID syck_emitter_mark_node( SyckEmitter *e, st_data_t n ) { SYMID oid = 0; char *anchor_name = NULL; /* * Ensure markers table is initialized. */ if ( e->markers == NULL ) { e->markers = st_init_numtable(); } /* * Markers table initially marks the string position of the * object. Doesn't yet create an anchor, simply notes the * position. */ if ( ! st_lookup( e->markers, n, (st_data_t *)&oid ) ) { /* * Store all markers */ oid = e->markers->num_entries + 1; st_insert( e->markers, n, (st_data_t)oid ); } else { if ( e->anchors == NULL ) { e->anchors = st_init_numtable(); } if ( ! st_lookup( e->anchors, (st_data_t)oid, (st_data_t *)&anchor_name ) ) { int idx = 0; char *anc = ( e->anchor_format == NULL ? DEFAULT_ANCHOR_FORMAT : e->anchor_format ); /* * Second time hitting this object, let's give it an anchor */ idx = e->anchors->num_entries + 1; anchor_name = S_ALLOC_N( char, strlen( anc ) + 10 ); S_MEMZERO( anchor_name, char, strlen( anc ) + 10 ); sprintf( anchor_name, anc, idx ); /* * Insert into anchors table */ st_insert( e->anchors, (st_data_t)oid, (st_data_t)anchor_name ); } } return oid; } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/lib/syck.h0000644000000000000000000002767711672453175021775 0ustar rootroot/* * syck.h * * $Author: why $ * $Date: 2005/04/13 06:27:54 $ * * Copyright (C) 2003 why the lucky stiff */ #ifndef SYCK_H #define SYCK_H #define SYCK_YAML_MAJOR 1 #define SYCK_YAML_MINOR 0 #define SYCK_VERSION "0.55" #define YAML_DOMAIN "yaml.org,2002" #include #include #ifdef HAVE_ST_H #include #else #include "syck_st.h" #endif #if defined(__cplusplus) extern "C" { #endif /* * Memory Allocation */ #if defined(HAVE_ALLOCA_H) && !defined(__GNUC__) #include #endif #if DEBUG void syck_assert( char *, unsigned ); # define ASSERT(f) \ if ( f ) \ {} \ else \ syck_assert( __FILE__, __LINE__ ) #else # define ASSERT(f) #endif #ifndef NULL # define NULL (void *)0 #endif #define ALLOC_CT 8 #define SYCK_BUFFERSIZE 4096 #define S_ALLOC_N(type,n) (type*)malloc(sizeof(type)*(n)) #define S_ALLOC(type) (type*)malloc(sizeof(type)) #define S_REALLOC_N(var,type,n) (var)=(type*)realloc((char*)(var),sizeof(type)*(n)) #define S_FREE(n) free(n); n = NULL; #define S_ALLOCA_N(type,n) (type*)alloca(sizeof(type)*(n)) #define S_MEMZERO(p,type,n) memset((p), 0, sizeof(type)*(n)) #define S_MEMCPY(p1,p2,type,n) memcpy((p1), (p2), sizeof(type)*(n)) #define S_MEMMOVE(p1,p2,type,n) memmove((p1), (p2), sizeof(type)*(n)) #define S_MEMCMP(p1,p2,type,n) memcmp((p1), (p2), sizeof(type)*(n)) #define BLOCK_FOLD 10 #define BLOCK_LIT 20 #define BLOCK_PLAIN 30 #define NL_CHOMP 40 #define NL_KEEP 50 /* * Node definitions */ #ifndef ST_DATA_T_DEFINED typedef long st_data_t; #endif #define SYMID unsigned long typedef struct _syck_node SyckNode; 
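/*
 * Illustrative sketch only -- not part of the original syck headers.
 * A minimal round trip through the emitter API declared below, assuming
 * the caller treats its st_data_t payloads as plain C strings (my_out
 * and my_node are hypothetical names), could look roughly like this:
 *
 *   static void my_out( SyckEmitter *e, char *str, long len ) {
 *       fwrite( str, 1, len, stdout );
 *   }
 *   static void my_node( SyckEmitter *e, st_data_t data ) {
 *       char *s = (char *)data;
 *       syck_emit_scalar( e, "tag:yaml.org,2002:str", scalar_none,
 *                         0, 0, 0, s, strlen( s ) );
 *   }
 *
 *   SyckEmitter *e = syck_new_emitter();
 *   syck_output_handler( e, my_out );
 *   syck_emitter_handler( e, my_node );
 *   syck_emit( e, (st_data_t)"hello" );
 *   syck_emitter_flush( e, 0 );
 *   syck_free_emitter( e );
 *
 * With the headless and header flags at their defaults the output is
 * roughly "--- hello" followed by a newline.
 */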
enum syck_kind_tag { syck_map_kind, syck_seq_kind, syck_str_kind }; enum map_part { map_key, map_value }; enum map_style { map_none, map_inline }; enum seq_style { seq_none, seq_inline }; enum scalar_style { scalar_none, scalar_1quote, scalar_2quote, scalar_fold, scalar_literal, scalar_plain }; /* * Node metadata struct */ struct _syck_node { /* Symbol table ID */ SYMID id; /* Underlying kind */ enum syck_kind_tag kind; /* Fully qualified tag-uri for type */ char *type_id; /* Anchor name */ char *anchor; union { /* Storage for map data */ struct SyckMap { enum map_style style; SYMID *keys; SYMID *values; long capa; long idx; } *pairs; /* Storage for sequence data */ struct SyckSeq { enum seq_style style; SYMID *items; long capa; long idx; } *list; /* Storage for string data */ struct SyckStr { enum scalar_style style; char *ptr; long len; } *str; } data; /* Shortcut node */ void *shortcut; }; /* * Parser definitions */ typedef struct _syck_parser SyckParser; typedef struct _syck_file SyckIoFile; typedef struct _syck_str SyckIoStr; typedef struct _syck_level SyckLevel; typedef SYMID (*SyckNodeHandler)(SyckParser *, SyckNode *); typedef void (*SyckErrorHandler)(SyckParser *, char *); typedef SyckNode * (*SyckBadAnchorHandler)(SyckParser *, char *); typedef long (*SyckIoFileRead)(char *, SyckIoFile *, long, long); typedef long (*SyckIoStrRead)(char *, SyckIoStr *, long, long); enum syck_io_type { syck_io_str, syck_io_file }; enum syck_parser_input { syck_yaml_utf8, syck_yaml_utf16, syck_yaml_utf32, syck_bytecode_utf8 }; enum syck_level_status { syck_lvl_header, syck_lvl_doc, syck_lvl_open, syck_lvl_seq, syck_lvl_map, syck_lvl_block, syck_lvl_str, syck_lvl_iseq, syck_lvl_imap, syck_lvl_end, syck_lvl_pause, syck_lvl_anctag, syck_lvl_mapx, syck_lvl_seqx }; /* * Parser structs */ struct _syck_file { /* File pointer */ FILE *ptr; /* Function which FILE -> buffer */ SyckIoFileRead read; }; struct _syck_str { /* String buffer pointers */ char *beg, *ptr, *end; /* Function which string -> buffer */ SyckIoStrRead read; }; struct _syck_level { /* Indent */ int spaces; /* Counts nodes emitted at this level, useful for parsing * keys and pairs in bytecode */ int ncount; /* Does node have anchors or tags? 
*/ int anctag; /* Domain prefixing at the given level */ char *domain; /* Keeps a node status */ enum syck_level_status status; }; struct _syck_parser { /* Root node */ SYMID root, root_on_error; /* Implicit typing flag */ int implicit_typing, taguri_expansion; /* Scripting language function to handle nodes */ SyckNodeHandler handler; /* Error handler */ SyckErrorHandler error_handler; /* InvalidAnchor handler */ SyckBadAnchorHandler bad_anchor_handler; /* Parser input type */ enum syck_parser_input input_type; /* IO type */ enum syck_io_type io_type; /* Custom buffer size */ size_t bufsize; /* Buffer pointers */ char *buffer, *linectptr, *lineptr, *toktmp, *token, *cursor, *marker, *limit; /* Line counter */ int linect; /* Last token from yylex() */ int last_token; /* Force a token upon next call to yylex() */ int force_token; /* EOF flag */ int eof; union { SyckIoFile *file; SyckIoStr *str; } io; /* Symbol table for anchors */ st_table *anchors, *bad_anchors; /* Optional symbol table for SYMIDs */ st_table *syms; /* Levels of indentation */ SyckLevel *levels; int lvl_idx; int lvl_capa; /* Pointer for extension's use */ void *bonus; }; /* * Emitter definitions */ typedef struct _syck_emitter SyckEmitter; typedef struct _syck_emitter_node SyckEmitterNode; typedef void (*SyckOutputHandler)(SyckEmitter *, char *, long); typedef void (*SyckEmitterHandler)(SyckEmitter *, st_data_t); enum doc_stage { doc_open, doc_need_header, doc_processing }; /* * Emitter struct */ struct _syck_emitter { /* Headerless doc flag */ int headless; /* Force header? */ int use_header; /* Force version? */ int use_version; /* Sort hash keys */ int sort_keys; /* Anchor format */ char *anchor_format; /* Explicit typing on all collections? */ int explicit_typing; /* Best width on folded scalars */ int best_width; /* Use literal[1] or folded[2] blocks on all text? */ enum scalar_style style; /* Stage of written document */ enum doc_stage stage; /* Level counter */ int level; /* Default indentation */ int indent; /* Object ignore ID */ SYMID ignore_id; /* Symbol table for anchors */ st_table *markers, *anchors, *anchored; /* Custom buffer size */ size_t bufsize; /* Buffer */ char *buffer, *marker; /* Absolute position of the buffer */ long bufpos; /* Handler for emitter nodes */ SyckEmitterHandler emitter_handler; /* Handler for output */ SyckOutputHandler output_handler; /* Levels of indentation */ SyckLevel *levels; int lvl_idx; int lvl_capa; /* Pointer for extension's use */ void *bonus; }; /* * Emitter node metadata struct */ struct _syck_emitter_node { /* Node buffer position */ long pos; /* Current indent */ long indent; /* Collection? 
*/ int is_shortcut; }; /* * Handler prototypes */ SYMID syck_hdlr_add_node( SyckParser *, SyckNode * ); SyckNode *syck_hdlr_add_anchor( SyckParser *, char *, SyckNode * ); void syck_hdlr_remove_anchor( SyckParser *, char * ); SyckNode *syck_hdlr_get_anchor( SyckParser *, char * ); void syck_add_transfer( char *, SyckNode *, int ); char *syck_xprivate( char *, int ); char *syck_taguri( char *, char *, int ); int syck_tagcmp( char *, char * ); int syck_add_sym( SyckParser *, char * ); int syck_lookup_sym( SyckParser *, SYMID, char ** ); int syck_try_implicit( SyckNode * ); char *syck_type_id_to_uri( char * ); void try_tag_implicit( SyckNode *, int ); char *syck_match_implicit( char *, size_t ); /* * API prototypes */ char *syck_strndup( char *, long ); long syck_io_file_read( char *, SyckIoFile *, long, long ); long syck_io_str_read( char *, SyckIoStr *, long, long ); char *syck_base64enc( char *, long ); char *syck_base64dec( char *, long ); SyckEmitter *syck_new_emitter(); SYMID syck_emitter_mark_node( SyckEmitter *, st_data_t ); void syck_emitter_ignore_id( SyckEmitter *, SYMID ); void syck_output_handler( SyckEmitter *, SyckOutputHandler ); void syck_emitter_handler( SyckEmitter *, SyckEmitterHandler ); void syck_free_emitter( SyckEmitter * ); void syck_emitter_clear( SyckEmitter * ); void syck_emitter_write( SyckEmitter *, char *, long ); void syck_emitter_escape( SyckEmitter *, char *, long ); void syck_emitter_flush( SyckEmitter *, long ); void syck_emit( SyckEmitter *, st_data_t ); void syck_emit_scalar( SyckEmitter *, char *, enum scalar_style, int, int, char, char *, long ); void syck_emit_1quoted( SyckEmitter *, int, char *, long ); void syck_emit_2quoted( SyckEmitter *, int, char *, long ); void syck_emit_folded( SyckEmitter *, int, char, char *, long ); void syck_emit_literal( SyckEmitter *, char, char *, long ); void syck_emit_seq( SyckEmitter *, char *, enum seq_style ); void syck_emit_item( SyckEmitter *, st_data_t ); void syck_emit_map( SyckEmitter *, char *, enum map_style ); void syck_emit_end( SyckEmitter * ); void syck_emit_tag( SyckEmitter *, char *, char * ); void syck_emit_indent( SyckEmitter * ); SyckLevel *syck_emitter_current_level( SyckEmitter * ); SyckLevel *syck_emitter_parent_level( SyckEmitter * ); void syck_emitter_pop_level( SyckEmitter * ); void syck_emitter_add_level( SyckEmitter *, int, enum syck_level_status ); void syck_emitter_reset_levels( SyckEmitter * ); SyckParser *syck_new_parser(); void syck_free_parser( SyckParser * ); void syck_parser_set_root_on_error( SyckParser *, SYMID ); void syck_parser_implicit_typing( SyckParser *, int ); void syck_parser_taguri_expansion( SyckParser *, int ); int syck_scan_scalar( int, char *, long ); void syck_parser_handler( SyckParser *, SyckNodeHandler ); void syck_parser_error_handler( SyckParser *, SyckErrorHandler ); void syck_parser_bad_anchor_handler( SyckParser *, SyckBadAnchorHandler ); void syck_parser_file( SyckParser *, FILE *, SyckIoFileRead ); void syck_parser_str( SyckParser *, char *, long, SyckIoStrRead ); void syck_parser_str_auto( SyckParser *, char *, SyckIoStrRead ); SyckLevel *syck_parser_current_level( SyckParser * ); void syck_parser_add_level( SyckParser *, int, enum syck_level_status ); void syck_parser_pop_level( SyckParser * ); void free_any_io( SyckParser * ); long syck_parser_read( SyckParser * ); long syck_parser_readlen( SyckParser *, long ); SYMID syck_parse( SyckParser * ); void syck_default_error_handler( SyckParser *, char * ); SYMID syck_yaml2byte_handler( SyckParser *, SyckNode * 
); char *syck_yaml2byte( char * ); /* * Allocation prototypes */ SyckNode *syck_alloc_map(); SyckNode *syck_alloc_seq(); SyckNode *syck_alloc_str(); void syck_free_node( SyckNode * ); void syck_free_members( SyckNode * ); SyckNode *syck_new_str( char *, enum scalar_style ); SyckNode *syck_new_str2( char *, long, enum scalar_style ); void syck_replace_str( SyckNode *, char *, enum scalar_style ); void syck_replace_str2( SyckNode *, char *, long, enum scalar_style ); void syck_str_blow_away_commas( SyckNode * ); char *syck_str_read( SyckNode * ); SyckNode *syck_new_map( SYMID, SYMID ); void syck_map_empty( SyckNode * ); void syck_map_add( SyckNode *, SYMID, SYMID ); SYMID syck_map_read( SyckNode *, enum map_part, long ); void syck_map_assign( SyckNode *, enum map_part, long, SYMID ); long syck_map_count( SyckNode * ); void syck_map_update( SyckNode *, SyckNode * ); SyckNode *syck_new_seq( SYMID ); void syck_seq_empty( SyckNode * ); void syck_seq_add( SyckNode *, SYMID ); void syck_seq_assign( SyckNode *, long, SYMID ); SYMID syck_seq_read( SyckNode *, long ); long syck_seq_count( SyckNode * ); /* * Lexer prototypes */ void syckerror( char * ); #if defined(__cplusplus) } /* extern "C" { */ #endif #endif /* ifndef SYCK_H */ ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/lib/yamlbyte.h0000644000000000000000000001506011672453175022631 0ustar rootroot/* yamlbyte.h * * The YAML bytecode "C" interface header file. See the YAML bytecode * reference for bytecode sequence rules and for the meaning of each * bytecode. */ #ifndef YAMLBYTE_H #define YAMLBYTE_H #include /* define what a character is */ typedef unsigned char yamlbyte_utf8_t; typedef unsigned short yamlbyte_utf16_t; #ifdef YAMLBYTE_UTF8 #ifdef YAMLBYTE_UTF16 #error Must only define YAMLBYTE_UTF8 or YAMLBYTE_UTF16 #endif typedef yamlbyte_utf8_t yamlbyte_char_t; #else #ifdef YAMLBYTE_UTF16 typedef yamlbyte_utf16_t yamlbyte_char_t; #else #error Must define YAMLBYTE_UTF8 or YAMLBYTE_UTF16 #endif #endif /* specify list of bytecodes */ #define YAMLBYTE_FINISH ((yamlbyte_char_t) 0) #define YAMLBYTE_DOCUMENT ((yamlbyte_char_t)'D') #define YAMLBYTE_DIRECTIVE ((yamlbyte_char_t)'V') #define YAMLBYTE_PAUSE ((yamlbyte_char_t)'P') #define YAMLBYTE_MAPPING ((yamlbyte_char_t)'M') #define YAMLBYTE_SEQUENCE ((yamlbyte_char_t)'Q') #define YAMLBYTE_END_BRANCH ((yamlbyte_char_t)'E') #define YAMLBYTE_SCALAR ((yamlbyte_char_t)'S') #define YAMLBYTE_CONTINUE ((yamlbyte_char_t)'C') #define YAMLBYTE_NEWLINE ((yamlbyte_char_t)'N') #define YAMLBYTE_NULLCHAR ((yamlbyte_char_t)'Z') #define YAMLBYTE_ANCHOR ((yamlbyte_char_t)'A') #define YAMLBYTE_ALIAS ((yamlbyte_char_t)'R') #define YAMLBYTE_TRANSFER ((yamlbyte_char_t)'T') /* formatting bytecodes */ #define YAMLBYTE_COMMENT ((yamlbyte_char_t)'c') #define YAMLBYTE_INDENT ((yamlbyte_char_t)'i') #define YAMLBYTE_STYLE ((yamlbyte_char_t)'s') /* other bytecodes */ #define YAMLBYTE_LINE_NUMBER ((yamlbyte_char_t)'#') #define YAMLBYTE_WHOLE_SCALAR ((yamlbyte_char_t)'<') #define YAMLBYTE_NOTICE ((yamlbyte_char_t)'!') #define YAMLBYTE_SPAN ((yamlbyte_char_t)')') #define YAMLBYTE_ALLOC ((yamlbyte_char_t)'@') /* second level style bytecodes, ie "s>" */ #define YAMLBYTE_FLOW ((yamlbyte_char_t)'>') #define YAMLBYTE_LITERAL ((yamlbyte_char_t)'|') #define YAMLBYTE_BLOCK ((yamlbyte_char_t)'b') #define YAMLBYTE_PLAIN ((yamlbyte_char_t)'p') #define YAMLBYTE_INLINE_MAPPING ((yamlbyte_char_t)'{') #define YAMLBYTE_INLINE_SEQUENCE ((yamlbyte_char_t)'[') #define YAMLBYTE_SINGLE_QUOTED ((yamlbyte_char_t)39) #define YAMLBYTE_DOUBLE_QUOTED 
((yamlbyte_char_t)'"') /* * The "C" API has two variants, one based on instructions, * with events delivered via pointers; and the other one * is character based where one or more instructions are * serialized into a buffer. * * Note: In the instruction based API, WHOLE_SCALAR does * not have the '. case $1 in '') echo "$0: No command. Try \`$0 --help' for more information." 1>&2 exit 1; ;; -h | --h*) cat <<\EOF Usage: depcomp [--help] [--version] PROGRAM [ARGS] Run PROGRAMS ARGS to compile a file, generating dependencies as side-effects. Environment variables: depmode Dependency tracking mode. source Source file read by `PROGRAMS ARGS'. object Object file output by `PROGRAMS ARGS'. DEPDIR directory where to store dependencies. depfile Dependency file to output. tmpdepfile Temporary file to use when outputing dependencies. libtool Whether libtool is used (yes/no). Report bugs to . EOF exit $? ;; -v | --v*) echo "depcomp $scriptversion" exit $? ;; esac if test -z "$depmode" || test -z "$source" || test -z "$object"; then echo "depcomp: Variables source, object and depmode must be set" 1>&2 exit 1 fi # Dependencies for sub/bar.o or sub/bar.obj go into sub/.deps/bar.Po. depfile=${depfile-`echo "$object" | sed 's|[^\\/]*$|'${DEPDIR-.deps}'/&|;s|\.\([^.]*\)$|.P\1|;s|Pobj$|Po|'`} tmpdepfile=${tmpdepfile-`echo "$depfile" | sed 's/\.\([^.]*\)$/.T\1/'`} rm -f "$tmpdepfile" # Some modes work just like other modes, but use different flags. We # parameterize here, but still list the modes in the big case below, # to make depend.m4 easier to write. Note that we *cannot* use a case # here, because this file can only contain one case statement. if test "$depmode" = hp; then # HP compiler uses -M and no extra arg. gccflag=-M depmode=gcc fi if test "$depmode" = dashXmstdout; then # This is just like dashmstdout with a different argument. dashmflag=-xM depmode=dashmstdout fi case "$depmode" in gcc3) ## gcc 3 implements dependency tracking that does exactly what ## we want. Yay! Note: for some reason libtool 1.4 doesn't like ## it if -MD -MP comes after the -MF stuff. Hmm. "$@" -MT "$object" -MD -MP -MF "$tmpdepfile" stat=$? if test $stat -eq 0; then : else rm -f "$tmpdepfile" exit $stat fi mv "$tmpdepfile" "$depfile" ;; gcc) ## There are various ways to get dependency output from gcc. Here's ## why we pick this rather obscure method: ## - Don't want to use -MD because we'd like the dependencies to end ## up in a subdir. Having to rename by hand is ugly. ## (We might end up doing this anyway to support other compilers.) ## - The DEPENDENCIES_OUTPUT environment variable makes gcc act like ## -MM, not -M (despite what the docs say). ## - Using -M directly means running the compiler twice (even worse ## than renaming). if test -z "$gccflag"; then gccflag=-MD, fi "$@" -Wp,"$gccflag$tmpdepfile" stat=$? if test $stat -eq 0; then : else rm -f "$tmpdepfile" exit $stat fi rm -f "$depfile" echo "$object : \\" > "$depfile" alpha=ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz ## The second -e expression handles DOS-style file names with drive letters. sed -e 's/^[^:]*: / /' \ -e 's/^['$alpha']:\/[^:]*: / /' < "$tmpdepfile" >> "$depfile" ## This next piece of magic avoids the `deleted header file' problem. ## The problem is that when a header file which appears in a .P file ## is deleted, the dependency causes make to die (because there is ## typically no way to rebuild the header). We avoid this by adding ## dummy dependencies for each header file. Too bad gcc doesn't do ## this for us directly. 
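## (Illustrative example, not part of the original script: if "$tmpdepfile"
## contains a line such as
##   emitter.o: emitter.c syck.h
## the pipeline below drops the target and appends the dummy rules
##   emitter.c :
##   syck.h :
## to "$depfile", so deleting syck.h later no longer breaks make.)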
tr ' ' ' ' < "$tmpdepfile" | ## Some versions of gcc put a space before the `:'. On the theory ## that the space means something, we add a space to the output as ## well. ## Some versions of the HPUX 10.20 sed can't process this invocation ## correctly. Breaking it into two sed invocations is a workaround. sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' | sed -e 's/$/ :/' >> "$depfile" rm -f "$tmpdepfile" ;; hp) # This case exists only to let depend.m4 do its work. It works by # looking at the text of this script. This case will never be run, # since it is checked for above. exit 1 ;; sgi) if test "$libtool" = yes; then "$@" "-Wp,-MDupdate,$tmpdepfile" else "$@" -MDupdate "$tmpdepfile" fi stat=$? if test $stat -eq 0; then : else rm -f "$tmpdepfile" exit $stat fi rm -f "$depfile" if test -f "$tmpdepfile"; then # yes, the sourcefile depend on other files echo "$object : \\" > "$depfile" # Clip off the initial element (the dependent). Don't try to be # clever and replace this with sed code, as IRIX sed won't handle # lines with more than a fixed number of characters (4096 in # IRIX 6.2 sed, 8192 in IRIX 6.5). We also remove comment lines; # the IRIX cc adds comments like `#:fec' to the end of the # dependency line. tr ' ' ' ' < "$tmpdepfile" \ | sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' | \ tr ' ' ' ' >> $depfile echo >> $depfile # The second pass generates a dummy entry for each header file. tr ' ' ' ' < "$tmpdepfile" \ | sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' -e 's/$/:/' \ >> $depfile else # The sourcefile does not contain any dependencies, so just # store a dummy comment line, to avoid errors with the Makefile # "include basename.Plo" scheme. echo "#dummy" > "$depfile" fi rm -f "$tmpdepfile" ;; aix) # The C for AIX Compiler uses -M and outputs the dependencies # in a .u file. In older versions, this file always lives in the # current directory. Also, the AIX compiler puts `$object:' at the # start of each line; $object doesn't have directory information. # Version 6 uses the directory in both cases. stripped=`echo "$object" | sed 's/\(.*\)\..*$/\1/'` tmpdepfile="$stripped.u" if test "$libtool" = yes; then "$@" -Wc,-M else "$@" -M fi stat=$? if test -f "$tmpdepfile"; then : else stripped=`echo "$stripped" | sed 's,^.*/,,'` tmpdepfile="$stripped.u" fi if test $stat -eq 0; then : else rm -f "$tmpdepfile" exit $stat fi if test -f "$tmpdepfile"; then outname="$stripped.o" # Each line is of the form `foo.o: dependent.h'. # Do two passes, one to just change these to # `$object: dependent.h' and one to simply `dependent.h:'. sed -e "s,^$outname:,$object :," < "$tmpdepfile" > "$depfile" sed -e "s,^$outname: \(.*\)$,\1:," < "$tmpdepfile" >> "$depfile" else # The sourcefile does not contain any dependencies, so just # store a dummy comment line, to avoid errors with the Makefile # "include basename.Plo" scheme. echo "#dummy" > "$depfile" fi rm -f "$tmpdepfile" ;; icc) # Intel's C compiler understands `-MD -MF file'. However on # icc -MD -MF foo.d -c -o sub/foo.o sub/foo.c # ICC 7.0 will fill foo.d with something like # foo.o: sub/foo.c # foo.o: sub/foo.h # which is wrong. We want: # sub/foo.o: sub/foo.c # sub/foo.o: sub/foo.h # sub/foo.c: # sub/foo.h: # ICC 7.1 will output # foo.o: sub/foo.c sub/foo.h # and will wrap long lines using \ : # foo.o: sub/foo.c ... \ # sub/foo.h ... \ # ... "$@" -MD -MF "$tmpdepfile" stat=$? 
if test $stat -eq 0; then : else rm -f "$tmpdepfile" exit $stat fi rm -f "$depfile" # Each line is of the form `foo.o: dependent.h', # or `foo.o: dep1.h dep2.h \', or ` dep3.h dep4.h \'. # Do two passes, one to just change these to # `$object: dependent.h' and one to simply `dependent.h:'. sed "s,^[^:]*:,$object :," < "$tmpdepfile" > "$depfile" # Some versions of the HPUX 10.20 sed can't process this invocation # correctly. Breaking it into two sed invocations is a workaround. sed 's,^[^:]*: \(.*\)$,\1,;s/^\\$//;/^$/d;/:$/d' < "$tmpdepfile" | sed -e 's/$/ :/' >> "$depfile" rm -f "$tmpdepfile" ;; tru64) # The Tru64 compiler uses -MD to generate dependencies as a side # effect. `cc -MD -o foo.o ...' puts the dependencies into `foo.o.d'. # At least on Alpha/Redhat 6.1, Compaq CCC V6.2-504 seems to put # dependencies in `foo.d' instead, so we check for that too. # Subdirectories are respected. dir=`echo "$object" | sed -e 's|/[^/]*$|/|'` test "x$dir" = "x$object" && dir= base=`echo "$object" | sed -e 's|^.*/||' -e 's/\.o$//' -e 's/\.lo$//'` if test "$libtool" = yes; then # With Tru64 cc, shared objects can also be used to make a # static library. This mecanism is used in libtool 1.4 series to # handle both shared and static libraries in a single compilation. # With libtool 1.4, dependencies were output in $dir.libs/$base.lo.d. # # With libtool 1.5 this exception was removed, and libtool now # generates 2 separate objects for the 2 libraries. These two # compilations output dependencies in in $dir.libs/$base.o.d and # in $dir$base.o.d. We have to check for both files, because # one of the two compilations can be disabled. We should prefer # $dir$base.o.d over $dir.libs/$base.o.d because the latter is # automatically cleaned when .libs/ is deleted, while ignoring # the former would cause a distcleancheck panic. tmpdepfile1=$dir.libs/$base.lo.d # libtool 1.4 tmpdepfile2=$dir$base.o.d # libtool 1.5 tmpdepfile3=$dir.libs/$base.o.d # libtool 1.5 tmpdepfile4=$dir.libs/$base.d # Compaq CCC V6.2-504 "$@" -Wc,-MD else tmpdepfile1=$dir$base.o.d tmpdepfile2=$dir$base.d tmpdepfile3=$dir$base.d tmpdepfile4=$dir$base.d "$@" -MD fi stat=$? if test $stat -eq 0; then : else rm -f "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3" "$tmpdepfile4" exit $stat fi for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3" "$tmpdepfile4" do test -f "$tmpdepfile" && break done if test -f "$tmpdepfile"; then sed -e "s,^.*\.[a-z]*:,$object:," < "$tmpdepfile" > "$depfile" # That's a tab and a space in the []. sed -e 's,^.*\.[a-z]*:[ ]*,,' -e 's,$,:,' < "$tmpdepfile" >> "$depfile" else echo "#dummy" > "$depfile" fi rm -f "$tmpdepfile" ;; #nosideeffect) # This comment above is used by automake to tell side-effect # dependency tracking mechanisms from slower ones. dashmstdout) # Important note: in order to support this mode, a compiler *must* # always write the preprocessed file to stdout, regardless of -o. "$@" || exit $? # Remove the call to Libtool. if test "$libtool" = yes; then while test $1 != '--mode=compile'; do shift done shift fi # Remove `-o $object'. IFS=" " for arg do case $arg in -o) shift ;; $object) shift ;; *) set fnord "$@" "$arg" shift # fnord shift # $arg ;; esac done test -z "$dashmflag" && dashmflag=-M # Require at least two characters before searching for `:' # in the target name. This is to cope with DOS-style filenames: # a dependency such as `c:/foo/bar' could be seen as target `c' otherwise. 
"$@" $dashmflag | sed 's:^[ ]*[^: ][^:][^:]*\:[ ]*:'"$object"'\: :' > "$tmpdepfile" rm -f "$depfile" cat < "$tmpdepfile" > "$depfile" tr ' ' ' ' < "$tmpdepfile" | \ ## Some versions of the HPUX 10.20 sed can't process this invocation ## correctly. Breaking it into two sed invocations is a workaround. sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' | sed -e 's/$/ :/' >> "$depfile" rm -f "$tmpdepfile" ;; dashXmstdout) # This case only exists to satisfy depend.m4. It is never actually # run, as this mode is specially recognized in the preamble. exit 1 ;; makedepend) "$@" || exit $? # Remove any Libtool call if test "$libtool" = yes; then while test $1 != '--mode=compile'; do shift done shift fi # X makedepend shift cleared=no for arg in "$@"; do case $cleared in no) set ""; shift cleared=yes ;; esac case "$arg" in -D*|-I*) set fnord "$@" "$arg"; shift ;; # Strip any option that makedepend may not understand. Remove # the object too, otherwise makedepend will parse it as a source file. -*|$object) ;; *) set fnord "$@" "$arg"; shift ;; esac done obj_suffix="`echo $object | sed 's/^.*\././'`" touch "$tmpdepfile" ${MAKEDEPEND-makedepend} -o"$obj_suffix" -f"$tmpdepfile" "$@" rm -f "$depfile" cat < "$tmpdepfile" > "$depfile" sed '1,2d' "$tmpdepfile" | tr ' ' ' ' | \ ## Some versions of the HPUX 10.20 sed can't process this invocation ## correctly. Breaking it into two sed invocations is a workaround. sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' | sed -e 's/$/ :/' >> "$depfile" rm -f "$tmpdepfile" "$tmpdepfile".bak ;; cpp) # Important note: in order to support this mode, a compiler *must* # always write the preprocessed file to stdout. "$@" || exit $? # Remove the call to Libtool. if test "$libtool" = yes; then while test $1 != '--mode=compile'; do shift done shift fi # Remove `-o $object'. IFS=" " for arg do case $arg in -o) shift ;; $object) shift ;; *) set fnord "$@" "$arg" shift # fnord shift # $arg ;; esac done "$@" -E | sed -n '/^# [0-9][0-9]* "\([^"]*\)".*/ s:: \1 \\:p' | sed '$ s: \\$::' > "$tmpdepfile" rm -f "$depfile" echo "$object : \\" > "$depfile" cat < "$tmpdepfile" >> "$depfile" sed < "$tmpdepfile" '/^$/d;s/^ //;s/ \\$//;s/$/ :/' >> "$depfile" rm -f "$tmpdepfile" ;; msvisualcpp) # Important note: in order to support this mode, a compiler *must* # always write the preprocessed file to stdout, regardless of -o, # because we must use -o when running libtool. "$@" || exit $? IFS=" " for arg do case "$arg" in "-Gm"|"/Gm"|"-Gi"|"/Gi"|"-ZI"|"/ZI") set fnord "$@" shift shift ;; *) set fnord "$@" "$arg" shift shift ;; esac done "$@" -E | sed -n '/^#line [0-9][0-9]* "\([^"]*\)"/ s::echo "`cygpath -u \\"\1\\"`":p' | sort | uniq > "$tmpdepfile" rm -f "$depfile" echo "$object : \\" > "$depfile" . "$tmpdepfile" | sed 's% %\\ %g' | sed -n '/^\(.*\)$/ s:: \1 \\:p' >> "$depfile" echo " " >> "$depfile" . 
"$tmpdepfile" | sed 's% %\\ %g' | sed -n '/^\(.*\)$/ s::\1\::p' >> "$depfile" rm -f "$tmpdepfile" ;; none) exec "$@" ;; *) echo "Unknown depmode $depmode" 1>&2 exit 1 ;; esac exit 0 # Local Variables: # mode: shell-script # sh-indentation: 2 # eval: (add-hook 'write-file-hooks 'time-stamp) # time-stamp-start: "scriptversion=" # time-stamp-format: "%:y-%02m-%02d.%02H" # time-stamp-end: "$" # End: ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/config/install-sh0000644000000000000000000002202111672453175023325 0ustar rootroot#!/bin/sh # install - install a program, script, or datafile scriptversion=2005-02-02.21 # This originates from X11R5 (mit/util/scripts/install.sh), which was # later released in X11R6 (xc/config/util/install.sh) with the # following copyright and license. # # Copyright (C) 1994 X Consortium # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # X CONSORTIUM BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN # AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNEC- # TION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # # Except as contained in this notice, the name of the X Consortium shall not # be used in advertising or otherwise to promote the sale, use or other deal- # ings in this Software without prior written authorization from the X Consor- # tium. # # # FSF changes to this file are in the public domain. # # Calling this script install-sh is preferred over install.sh, to prevent # `make' implicit rules from creating a file called install from it # when there is no Makefile. # # This script is compatible with the BSD install script, but was written # from scratch. It can only install one file at a time, a restriction # shared with many OS's install programs. # set DOITPROG to echo to test this script # Don't use :- since 4.3BSD and earlier shells don't like it. doit="${DOITPROG-}" # put in absolute paths if you don't have them in your path; or use env. vars. mvprog="${MVPROG-mv}" cpprog="${CPPROG-cp}" chmodprog="${CHMODPROG-chmod}" chownprog="${CHOWNPROG-chown}" chgrpprog="${CHGRPPROG-chgrp}" stripprog="${STRIPPROG-strip}" rmprog="${RMPROG-rm}" mkdirprog="${MKDIRPROG-mkdir}" chmodcmd="$chmodprog 0755" chowncmd= chgrpcmd= stripcmd= rmcmd="$rmprog -f" mvcmd="$mvprog" src= dst= dir_arg= dstarg= no_target_directory= usage="Usage: $0 [OPTION]... [-T] SRCFILE DSTFILE or: $0 [OPTION]... SRCFILES... DIRECTORY or: $0 [OPTION]... -t DIRECTORY SRCFILES... or: $0 [OPTION]... -d DIRECTORIES... In the 1st form, copy SRCFILE to DSTFILE. In the 2nd and 3rd, copy all SRCFILES to DIRECTORY. In the 4th, create DIRECTORIES. Options: -c (ignored) -d create directories instead of installing files. -g GROUP $chgrpprog installed files to GROUP. -m MODE $chmodprog installed files to MODE. 
-o USER $chownprog installed files to USER. -s $stripprog installed files. -t DIRECTORY install into DIRECTORY. -T report an error if DSTFILE is a directory. --help display this help and exit. --version display version info and exit. Environment variables override the default commands: CHGRPPROG CHMODPROG CHOWNPROG CPPROG MKDIRPROG MVPROG RMPROG STRIPPROG " while test -n "$1"; do case $1 in -c) shift continue;; -d) dir_arg=true shift continue;; -g) chgrpcmd="$chgrpprog $2" shift shift continue;; --help) echo "$usage"; exit $?;; -m) chmodcmd="$chmodprog $2" shift shift continue;; -o) chowncmd="$chownprog $2" shift shift continue;; -s) stripcmd=$stripprog shift continue;; -t) dstarg=$2 shift shift continue;; -T) no_target_directory=true shift continue;; --version) echo "$0 $scriptversion"; exit $?;; *) # When -d is used, all remaining arguments are directories to create. # When -t is used, the destination is already specified. test -n "$dir_arg$dstarg" && break # Otherwise, the last argument is the destination. Remove it from $@. for arg do if test -n "$dstarg"; then # $@ is not empty: it contains at least $arg. set fnord "$@" "$dstarg" shift # fnord fi shift # arg dstarg=$arg done break;; esac done if test -z "$1"; then if test -z "$dir_arg"; then echo "$0: no input file specified." >&2 exit 1 fi # It's OK to call `install-sh -d' without argument. # This can happen when creating conditional directories. exit 0 fi for src do # Protect names starting with `-'. case $src in -*) src=./$src ;; esac if test -n "$dir_arg"; then dst=$src src= if test -d "$dst"; then mkdircmd=: chmodcmd= else mkdircmd=$mkdirprog fi else # Waiting for this to be detected by the "$cpprog $src $dsttmp" command # might cause directories to be created, which would be especially bad # if $src (and thus $dsttmp) contains '*'. if test ! -f "$src" && test ! -d "$src"; then echo "$0: $src does not exist." >&2 exit 1 fi if test -z "$dstarg"; then echo "$0: no destination specified." >&2 exit 1 fi dst=$dstarg # Protect names starting with `-'. case $dst in -*) dst=./$dst ;; esac # If destination is a directory, append the input filename; won't work # if double slashes aren't ignored. if test -d "$dst"; then if test -n "$no_target_directory"; then echo "$0: $dstarg: Is a directory" >&2 exit 1 fi dst=$dst/`basename "$src"` fi fi # This sed command emulates the dirname command. dstdir=`echo "$dst" | sed -e 's,/*$,,;s,[^/]*$,,;s,/*$,,;s,^$,.,'` # Make sure that the destination directory exists. # Skip lots of stat calls in the usual case. if test ! -d "$dstdir"; then defaultIFS=' ' IFS="${IFS-$defaultIFS}" oIFS=$IFS # Some sh's can't handle IFS=/ for some reason. IFS='%' set x `echo "$dstdir" | sed -e 's@/@%@g' -e 's@^%@/@'` shift IFS=$oIFS pathcomp= while test $# -ne 0 ; do pathcomp=$pathcomp$1 shift if test ! -d "$pathcomp"; then $mkdirprog "$pathcomp" # mkdir can fail with a `File exist' error in case several # install-sh are creating the directory concurrently. This # is OK. test -d "$pathcomp" || exit fi pathcomp=$pathcomp/ done fi if test -n "$dir_arg"; then $doit $mkdircmd "$dst" \ && { test -z "$chowncmd" || $doit $chowncmd "$dst"; } \ && { test -z "$chgrpcmd" || $doit $chgrpcmd "$dst"; } \ && { test -z "$stripcmd" || $doit $stripcmd "$dst"; } \ && { test -z "$chmodcmd" || $doit $chmodcmd "$dst"; } else dstfile=`basename "$dst"` # Make a couple of temp file names in the proper directory. dsttmp=$dstdir/_inst.$$_ rmtmp=$dstdir/_rm.$$_ # Trap to clean up those temp files at exit. 
trap 'ret=$?; rm -f "$dsttmp" "$rmtmp" && exit $ret' 0 trap '(exit $?); exit' 1 2 13 15 # Copy the file name to the temp name. $doit $cpprog "$src" "$dsttmp" && # and set any options; do chmod last to preserve setuid bits. # # If any of these fail, we abort the whole thing. If we want to # ignore errors from any of these, just make sure not to ignore # errors from the above "$doit $cpprog $src $dsttmp" command. # { test -z "$chowncmd" || $doit $chowncmd "$dsttmp"; } \ && { test -z "$chgrpcmd" || $doit $chgrpcmd "$dsttmp"; } \ && { test -z "$stripcmd" || $doit $stripcmd "$dsttmp"; } \ && { test -z "$chmodcmd" || $doit $chmodcmd "$dsttmp"; } && # Now rename the file to the real destination. { $doit $mvcmd -f "$dsttmp" "$dstdir/$dstfile" 2>/dev/null \ || { # The rename failed, perhaps because mv can't rename something else # to itself, or perhaps because mv is so ancient that it does not # support -f. # Now remove or move aside any old file at destination location. # We try this two ways since rm can't unlink itself on some # systems and the destination file might be busy for other # reasons. In this case, the final cleanup might fail but the new # file should still install successfully. { if test -f "$dstdir/$dstfile"; then $doit $rmcmd -f "$dstdir/$dstfile" 2>/dev/null \ || $doit $mvcmd -f "$dstdir/$dstfile" "$rmtmp" 2>/dev/null \ || { echo "$0: cannot unlink or rename $dstdir/$dstfile" >&2 (exit 1); exit 1 } else : fi } && # Now rename the file to the real destination. $doit $mvcmd "$dsttmp" "$dstdir/$dstfile" } } fi || { (exit 1); exit 1; } done # The final little trick to "correctly" pass the exit status to the exit trap. { (exit 0); exit 0 } # Local variables: # eval: (add-hook 'write-file-hooks 'time-stamp) # time-stamp-start: "scriptversion=" # time-stamp-format: "%:y-%02m-%02d.%02H" # time-stamp-end: "$" # End: ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/config/missing0000644000000000000000000002517011672453175022730 0ustar rootroot#! /bin/sh # Common stub for a few missing GNU programs while installing. scriptversion=2005-02-08.22 # Copyright (C) 1996, 1997, 1999, 2000, 2002, 2003, 2004, 2005 # Free Software Foundation, Inc. # Originally by Fran,cois Pinard , 1996. # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2, or (at your option) # any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA # 02111-1307, USA. # As a special exception to the GNU General Public License, if you # distribute this file as part of a program that contains a # configuration script generated by Autoconf, you may include it under # the same distribution terms that you use for the rest of that program. if test $# -eq 0; then echo 1>&2 "Try \`$0 --help' for more information" exit 1 fi run=: # In the cases where this matters, `missing' is being run in the # srcdir already. if test -f configure.ac; then configure_ac=configure.ac else configure_ac=configure.in fi msg="missing on your system" case "$1" in --run) # Try to run requested program, and just exit if it succeeds. 
run= shift "$@" && exit 0 # Exit code 63 means version mismatch. This often happens # when the user try to use an ancient version of a tool on # a file that requires a minimum version. In this case we # we should proceed has if the program had been absent, or # if --run hadn't been passed. if test $? = 63; then run=: msg="probably too old" fi ;; -h|--h|--he|--hel|--help) echo "\ $0 [OPTION]... PROGRAM [ARGUMENT]... Handle \`PROGRAM [ARGUMENT]...' for when PROGRAM is missing, or return an error status if there is no known handling for PROGRAM. Options: -h, --help display this help and exit -v, --version output version information and exit --run try to run the given command, and emulate it if it fails Supported PROGRAM values: aclocal touch file \`aclocal.m4' autoconf touch file \`configure' autoheader touch file \`config.h.in' automake touch all \`Makefile.in' files bison create \`y.tab.[ch]', if possible, from existing .[ch] flex create \`lex.yy.c', if possible, from existing .c help2man touch the output file lex create \`lex.yy.c', if possible, from existing .c makeinfo touch the output file tar try tar, gnutar, gtar, then tar without non-portable flags yacc create \`y.tab.[ch]', if possible, from existing .[ch] Send bug reports to ." exit $? ;; -v|--v|--ve|--ver|--vers|--versi|--versio|--version) echo "missing $scriptversion (GNU Automake)" exit $? ;; -*) echo 1>&2 "$0: Unknown \`$1' option" echo 1>&2 "Try \`$0 --help' for more information" exit 1 ;; esac # Now exit if we have it, but it failed. Also exit now if we # don't have it and --version was passed (most likely to detect # the program). case "$1" in lex|yacc) # Not GNU programs, they don't have --version. ;; tar) if test -n "$run"; then echo 1>&2 "ERROR: \`tar' requires --run" exit 1 elif test "x$2" = "x--version" || test "x$2" = "x--help"; then exit 1 fi ;; *) if test -z "$run" && ($1 --version) > /dev/null 2>&1; then # We have it, but it failed. exit 1 elif test "x$2" = "x--version" || test "x$2" = "x--help"; then # Could not run --version or --help. This is probably someone # running `$TOOL --version' or `$TOOL --help' to check whether # $TOOL exists and not knowing $TOOL uses missing. exit 1 fi ;; esac # If it does not exist, or fails to run (possibly an outdated version), # try to emulate it. case "$1" in aclocal*) echo 1>&2 "\ WARNING: \`$1' is $msg. You should only need it if you modified \`acinclude.m4' or \`${configure_ac}'. You might want to install the \`Automake' and \`Perl' packages. Grab them from any GNU archive site." touch aclocal.m4 ;; autoconf) echo 1>&2 "\ WARNING: \`$1' is $msg. You should only need it if you modified \`${configure_ac}'. You might want to install the \`Autoconf' and \`GNU m4' packages. Grab them from any GNU archive site." touch configure ;; autoheader) echo 1>&2 "\ WARNING: \`$1' is $msg. You should only need it if you modified \`acconfig.h' or \`${configure_ac}'. You might want to install the \`Autoconf' and \`GNU m4' packages. Grab them from any GNU archive site." files=`sed -n 's/^[ ]*A[CM]_CONFIG_HEADER(\([^)]*\)).*/\1/p' ${configure_ac}` test -z "$files" && files="config.h" touch_files= for f in $files; do case "$f" in *:*) touch_files="$touch_files "`echo "$f" | sed -e 's/^[^:]*://' -e 's/:.*//'`;; *) touch_files="$touch_files $f.in";; esac done touch $touch_files ;; automake*) echo 1>&2 "\ WARNING: \`$1' is $msg. You should only need it if you modified \`Makefile.am', \`acinclude.m4' or \`${configure_ac}'. You might want to install the \`Automake' and \`Perl' packages. 
Grab them from any GNU archive site." find . -type f -name Makefile.am -print | sed 's/\.am$/.in/' | while read f; do touch "$f"; done ;; autom4te) echo 1>&2 "\ WARNING: \`$1' is needed, but is $msg. You might have modified some files without having the proper tools for further handling them. You can get \`$1' as part of \`Autoconf' from any GNU archive site." file=`echo "$*" | sed -n 's/.*--output[ =]*\([^ ]*\).*/\1/p'` test -z "$file" && file=`echo "$*" | sed -n 's/.*-o[ ]*\([^ ]*\).*/\1/p'` if test -f "$file"; then touch $file else test -z "$file" || exec >$file echo "#! /bin/sh" echo "# Created by GNU Automake missing as a replacement of" echo "# $ $@" echo "exit 0" chmod +x $file exit 1 fi ;; bison|yacc) echo 1>&2 "\ WARNING: \`$1' $msg. You should only need it if you modified a \`.y' file. You may need the \`Bison' package in order for those modifications to take effect. You can get \`Bison' from any GNU archive site." rm -f y.tab.c y.tab.h if [ $# -ne 1 ]; then eval LASTARG="\${$#}" case "$LASTARG" in *.y) SRCFILE=`echo "$LASTARG" | sed 's/y$/c/'` if [ -f "$SRCFILE" ]; then cp "$SRCFILE" y.tab.c fi SRCFILE=`echo "$LASTARG" | sed 's/y$/h/'` if [ -f "$SRCFILE" ]; then cp "$SRCFILE" y.tab.h fi ;; esac fi if [ ! -f y.tab.h ]; then echo >y.tab.h fi if [ ! -f y.tab.c ]; then echo 'main() { return 0; }' >y.tab.c fi ;; lex|flex) echo 1>&2 "\ WARNING: \`$1' is $msg. You should only need it if you modified a \`.l' file. You may need the \`Flex' package in order for those modifications to take effect. You can get \`Flex' from any GNU archive site." rm -f lex.yy.c if [ $# -ne 1 ]; then eval LASTARG="\${$#}" case "$LASTARG" in *.l) SRCFILE=`echo "$LASTARG" | sed 's/l$/c/'` if [ -f "$SRCFILE" ]; then cp "$SRCFILE" lex.yy.c fi ;; esac fi if [ ! -f lex.yy.c ]; then echo 'main() { return 0; }' >lex.yy.c fi ;; help2man) echo 1>&2 "\ WARNING: \`$1' is $msg. You should only need it if you modified a dependency of a manual page. You may need the \`Help2man' package in order for those modifications to take effect. You can get \`Help2man' from any GNU archive site." file=`echo "$*" | sed -n 's/.*-o \([^ ]*\).*/\1/p'` if test -z "$file"; then file=`echo "$*" | sed -n 's/.*--output=\([^ ]*\).*/\1/p'` fi if [ -f "$file" ]; then touch $file else test -z "$file" || exec >$file echo ".ab help2man is required to generate this page" exit 1 fi ;; makeinfo) echo 1>&2 "\ WARNING: \`$1' is $msg. You should only need it if you modified a \`.texi' or \`.texinfo' file, or any other file indirectly affecting the aspect of the manual. The spurious call might also be the consequence of using a buggy \`make' (AIX, DU, IRIX). You might want to install the \`Texinfo' package or the \`GNU make' package. Grab either from any GNU archive site." # The file to touch is that specified with -o ... file=`echo "$*" | sed -n 's/.*-o \([^ ]*\).*/\1/p'` if test -z "$file"; then # ... or it is the one specified with @setfilename ... infile=`echo "$*" | sed 's/.* \([^ ]*\) *$/\1/'` file=`sed -n '/^@setfilename/ { s/.* \([^ ]*\) *$/\1/; p; q; }' $infile` # ... or it is derived from the source name (dir/f.texi becomes f.info) test -z "$file" && file=`echo "$infile" | sed 's,.*/,,;s,.[^.]*$,,'`.info fi touch $file ;; tar) shift # We have already tried tar in the generic part. # Look for gnutar/gtar before invocation to avoid ugly error # messages. 
if (gnutar --version > /dev/null 2>&1); then gnutar "$@" && exit 0 fi if (gtar --version > /dev/null 2>&1); then gtar "$@" && exit 0 fi firstarg="$1" if shift; then case "$firstarg" in *o*) firstarg=`echo "$firstarg" | sed s/o//` tar "$firstarg" "$@" && exit 0 ;; esac case "$firstarg" in *h*) firstarg=`echo "$firstarg" | sed s/h//` tar "$firstarg" "$@" && exit 0 ;; esac fi echo 1>&2 "\ WARNING: I can't seem to be able to run \`tar' with the given arguments. You may want to install GNU tar or Free paxutils, or check the command line arguments." exit 1 ;; *) echo 1>&2 "\ WARNING: \`$1' is needed, and is $msg. You might have modified some files without having the proper tools for further handling them. Check the \`README' file, it often tells you about the needed prerequisites for installing this package. You may also peek at any GNU archive site, in case some other package would contain this missing \`$1' program." exit 1 ;; esac exit 0 # Local variables: # eval: (add-hook 'write-file-hooks 'time-stamp) # time-stamp-start: "scriptversion=" # time-stamp-format: "%:y-%02m-%02d.%02H" # time-stamp-end: "$" # End: ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/aclocal.m40000644000000000000000000007662511672453175021742 0ustar rootroot# generated automatically by aclocal 1.9.5 -*- Autoconf -*- # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, # 2005 Free Software Foundation, Inc. # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. # Copyright (C) 2002, 2003, 2005 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_AUTOMAKE_VERSION(VERSION) # ---------------------------- # Automake X.Y traces this macro to ensure aclocal.m4 has been # generated from the m4 files accompanying Automake X.Y. AC_DEFUN([AM_AUTOMAKE_VERSION], [am__api_version="1.9"]) # AM_SET_CURRENT_AUTOMAKE_VERSION # ------------------------------- # Call AM_AUTOMAKE_VERSION so it can be traced. # This function is AC_REQUIREd by AC_INIT_AUTOMAKE. AC_DEFUN([AM_SET_CURRENT_AUTOMAKE_VERSION], [AM_AUTOMAKE_VERSION([1.9.5])]) # AM_AUX_DIR_EXPAND -*- Autoconf -*- # Copyright (C) 2001, 2003, 2005 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # For projects using AC_CONFIG_AUX_DIR([foo]), Autoconf sets # $ac_aux_dir to `$srcdir/foo'. In other projects, it is set to # `$srcdir', `$srcdir/..', or `$srcdir/../..'. # # Of course, Automake must honor this variable whenever it calls a # tool from the auxiliary directory. The problem is that $srcdir (and # therefore $ac_aux_dir as well) can be either absolute or relative, # depending on how configure is run. This is pretty annoying, since # it makes $ac_aux_dir quite unusable in subdirectories: in the top # source directory, any form will work fine, but in subdirectories a # relative path needs to be adjusted first. 
# # $ac_aux_dir/missing # fails when called from a subdirectory if $ac_aux_dir is relative # $top_srcdir/$ac_aux_dir/missing # fails if $ac_aux_dir is absolute, # fails when called from a subdirectory in a VPATH build with # a relative $ac_aux_dir # # The reason of the latter failure is that $top_srcdir and $ac_aux_dir # are both prefixed by $srcdir. In an in-source build this is usually # harmless because $srcdir is `.', but things will broke when you # start a VPATH build or use an absolute $srcdir. # # So we could use something similar to $top_srcdir/$ac_aux_dir/missing, # iff we strip the leading $srcdir from $ac_aux_dir. That would be: # am_aux_dir='\$(top_srcdir)/'`expr "$ac_aux_dir" : "$srcdir//*\(.*\)"` # and then we would define $MISSING as # MISSING="\${SHELL} $am_aux_dir/missing" # This will work as long as MISSING is not called from configure, because # unfortunately $(top_srcdir) has no meaning in configure. # However there are other variables, like CC, which are often used in # configure, and could therefore not use this "fixed" $ac_aux_dir. # # Another solution, used here, is to always expand $ac_aux_dir to an # absolute PATH. The drawback is that using absolute paths prevent a # configured tree to be moved without reconfiguration. AC_DEFUN([AM_AUX_DIR_EXPAND], [dnl Rely on autoconf to set up CDPATH properly. AC_PREREQ([2.50])dnl # expand $ac_aux_dir to an absolute path am_aux_dir=`cd $ac_aux_dir && pwd` ]) # AM_CONDITIONAL -*- Autoconf -*- # Copyright (C) 1997, 2000, 2001, 2003, 2004, 2005 # Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # serial 7 # AM_CONDITIONAL(NAME, SHELL-CONDITION) # ------------------------------------- # Define a conditional. AC_DEFUN([AM_CONDITIONAL], [AC_PREREQ(2.52)dnl ifelse([$1], [TRUE], [AC_FATAL([$0: invalid condition: $1])], [$1], [FALSE], [AC_FATAL([$0: invalid condition: $1])])dnl AC_SUBST([$1_TRUE]) AC_SUBST([$1_FALSE]) if $2; then $1_TRUE= $1_FALSE='#' else $1_TRUE='#' $1_FALSE= fi AC_CONFIG_COMMANDS_PRE( [if test -z "${$1_TRUE}" && test -z "${$1_FALSE}"; then AC_MSG_ERROR([[conditional "$1" was never defined. Usually this means the macro was only invoked conditionally.]]) fi])]) # Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005 # Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # serial 8 # There are a few dirty hacks below to avoid letting `AC_PROG_CC' be # written in clear, in which case automake, when reading aclocal.m4, # will think it sees a *use*, and therefore will trigger all it's # C support machinery. Also note that it means that autoscan, seeing # CC etc. in the Makefile, will ask for an AC_PROG_CC use... # _AM_DEPENDENCIES(NAME) # ---------------------- # See how the compiler implements dependency checking. # NAME is "CC", "CXX", "GCJ", or "OBJC". # We try a few techniques and use that to set a single cache variable. # # We don't AC_REQUIRE the corresponding AC_PROG_CC since the latter was # modified to invoke _AM_DEPENDENCIES(CC); we would have a circular # dependency, and given that the user is not expected to run this macro, # just rely on AC_PROG_CC. 
AC_DEFUN([_AM_DEPENDENCIES], [AC_REQUIRE([AM_SET_DEPDIR])dnl AC_REQUIRE([AM_OUTPUT_DEPENDENCY_COMMANDS])dnl AC_REQUIRE([AM_MAKE_INCLUDE])dnl AC_REQUIRE([AM_DEP_TRACK])dnl ifelse([$1], CC, [depcc="$CC" am_compiler_list=], [$1], CXX, [depcc="$CXX" am_compiler_list=], [$1], OBJC, [depcc="$OBJC" am_compiler_list='gcc3 gcc'], [$1], GCJ, [depcc="$GCJ" am_compiler_list='gcc3 gcc'], [depcc="$$1" am_compiler_list=]) AC_CACHE_CHECK([dependency style of $depcc], [am_cv_$1_dependencies_compiler_type], [if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then # We make a subdir and do the tests there. Otherwise we can end up # making bogus files that we don't know about and never remove. For # instance it was reported that on HP-UX the gcc test will end up # making a dummy file named `D' -- because `-MD' means `put the output # in D'. mkdir conftest.dir # Copy depcomp to subdir because otherwise we won't find it if we're # using a relative directory. cp "$am_depcomp" conftest.dir cd conftest.dir # We will build objects and dependencies in a subdirectory because # it helps to detect inapplicable dependency modes. For instance # both Tru64's cc and ICC support -MD to output dependencies as a # side effect of compilation, but ICC will put the dependencies in # the current directory while Tru64 will put them in the object # directory. mkdir sub am_cv_$1_dependencies_compiler_type=none if test "$am_compiler_list" = ""; then am_compiler_list=`sed -n ['s/^#*\([a-zA-Z0-9]*\))$/\1/p'] < ./depcomp` fi for depmode in $am_compiler_list; do # Setup a source with many dependencies, because some compilers # like to wrap large dependency lists on column 80 (with \), and # we should not choose a depcomp mode which is confused by this. # # We need to recreate these files for each test, as the compiler may # overwrite some of them when testing with obscure command lines. # This happens at least with the AIX C compiler. : > sub/conftest.c for i in 1 2 3 4 5 6; do echo '#include "conftst'$i'.h"' >> sub/conftest.c # Using `: > sub/conftst$i.h' creates only sub/conftst1.h with # Solaris 8's {/usr,}/bin/sh. touch sub/conftst$i.h done echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf case $depmode in nosideeffect) # after this tag, mechanisms are not by side-effect, so they'll # only be used when explicitly requested if test "x$enable_dependency_tracking" = xyes; then continue else break fi ;; none) break ;; esac # We check with `-c' and `-o' for the sake of the "dashmstdout" # mode. It turns out that the SunPro C++ compiler does not properly # handle `-M -o', and we need to detect this. if depmode=$depmode \ source=sub/conftest.c object=sub/conftest.${OBJEXT-o} \ depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \ $SHELL ./depcomp $depcc -c -o sub/conftest.${OBJEXT-o} sub/conftest.c \ >/dev/null 2>conftest.err && grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 && grep sub/conftest.${OBJEXT-o} sub/conftest.Po > /dev/null 2>&1 && ${MAKE-make} -s -f confmf > /dev/null 2>&1; then # icc doesn't choke on unknown options, it will just issue warnings # or remarks (even with -Werror). So we grep stderr for any message # that says an option was ignored or not supported. 
# When given -MP, icc 7.0 and 7.1 complain thusly: # icc: Command line warning: ignoring option '-M'; no argument required # The diagnosis changed in icc 8.0: # icc: Command line remark: option '-MP' not supported if (grep 'ignoring option' conftest.err || grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else am_cv_$1_dependencies_compiler_type=$depmode break fi fi done cd .. rm -rf conftest.dir else am_cv_$1_dependencies_compiler_type=none fi ]) AC_SUBST([$1DEPMODE], [depmode=$am_cv_$1_dependencies_compiler_type]) AM_CONDITIONAL([am__fastdep$1], [ test "x$enable_dependency_tracking" != xno \ && test "$am_cv_$1_dependencies_compiler_type" = gcc3]) ]) # AM_SET_DEPDIR # ------------- # Choose a directory name for dependency files. # This macro is AC_REQUIREd in _AM_DEPENDENCIES AC_DEFUN([AM_SET_DEPDIR], [AC_REQUIRE([AM_SET_LEADING_DOT])dnl AC_SUBST([DEPDIR], ["${am__leading_dot}deps"])dnl ]) # AM_DEP_TRACK # ------------ AC_DEFUN([AM_DEP_TRACK], [AC_ARG_ENABLE(dependency-tracking, [ --disable-dependency-tracking speeds up one-time build --enable-dependency-tracking do not reject slow dependency extractors]) if test "x$enable_dependency_tracking" != xno; then am_depcomp="$ac_aux_dir/depcomp" AMDEPBACKSLASH='\' fi AM_CONDITIONAL([AMDEP], [test "x$enable_dependency_tracking" != xno]) AC_SUBST([AMDEPBACKSLASH]) ]) # Generate code to set up dependency tracking. -*- Autoconf -*- # Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005 # Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. #serial 3 # _AM_OUTPUT_DEPENDENCY_COMMANDS # ------------------------------ AC_DEFUN([_AM_OUTPUT_DEPENDENCY_COMMANDS], [for mf in $CONFIG_FILES; do # Strip MF so we end up with the name of the file. mf=`echo "$mf" | sed -e 's/:.*$//'` # Check whether this is an Automake generated Makefile or not. # We used to match only the files named `Makefile.in', but # some people rename them; so instead we look at the file content. # Grep'ing the first line is not enough: some people post-process # each Makefile.in and add a new line on top of each file to say so. # So let's grep whole file. if grep '^#.*generated by automake' $mf > /dev/null 2>&1; then dirpart=`AS_DIRNAME("$mf")` else continue fi # Extract the definition of DEPDIR, am__include, and am__quote # from the Makefile without running `make'. DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"` test -z "$DEPDIR" && continue am__include=`sed -n 's/^am__include = //p' < "$mf"` test -z "am__include" && continue am__quote=`sed -n 's/^am__quote = //p' < "$mf"` # When using ansi2knr, U may be empty or an underscore; expand it U=`sed -n 's/^U = //p' < "$mf"` # Find all dependency output files, they are included files with # $(DEPDIR) in their names. We invoke sed twice because it is the # simplest approach to changing $(DEPDIR) to its actual value in the # expansion. for file in `sed -n " s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \ sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g' -e 's/\$U/'"$U"'/g'`; do # Make sure the directory exists. test -f "$dirpart/$file" && continue fdir=`AS_DIRNAME(["$file"])` AS_MKDIR_P([$dirpart/$fdir]) # echo "creating $dirpart/$file" echo '# dummy' > "$dirpart/$file" done done ])# _AM_OUTPUT_DEPENDENCY_COMMANDS # AM_OUTPUT_DEPENDENCY_COMMANDS # ----------------------------- # This macro should only be invoked once -- use via AC_REQUIRE. 
# # This code is only required when automatic dependency tracking # is enabled. FIXME. This creates each `.P' file that we will # need in order to bootstrap the dependency handling code. AC_DEFUN([AM_OUTPUT_DEPENDENCY_COMMANDS], [AC_CONFIG_COMMANDS([depfiles], [test x"$AMDEP_TRUE" != x"" || _AM_OUTPUT_DEPENDENCY_COMMANDS], [AMDEP_TRUE="$AMDEP_TRUE" ac_aux_dir="$ac_aux_dir"]) ]) # Copyright (C) 1996, 1997, 2000, 2001, 2003, 2005 # Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # serial 8 # AM_CONFIG_HEADER is obsolete. It has been replaced by AC_CONFIG_HEADERS. AU_DEFUN([AM_CONFIG_HEADER], [AC_CONFIG_HEADERS($@)]) # Do all the work for Automake. -*- Autoconf -*- # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 # Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # serial 12 # This macro actually does too much. Some checks are only needed if # your package does certain things. But this isn't really a big deal. # AM_INIT_AUTOMAKE(PACKAGE, VERSION, [NO-DEFINE]) # AM_INIT_AUTOMAKE([OPTIONS]) # ----------------------------------------------- # The call with PACKAGE and VERSION arguments is the old style # call (pre autoconf-2.50), which is being phased out. PACKAGE # and VERSION should now be passed to AC_INIT and removed from # the call to AM_INIT_AUTOMAKE. # We support both call styles for the transition. After # the next Automake release, Autoconf can make the AC_INIT # arguments mandatory, and then we can depend on a new Autoconf # release and drop the old call support. AC_DEFUN([AM_INIT_AUTOMAKE], [AC_PREREQ([2.58])dnl dnl Autoconf wants to disallow AM_ names. We explicitly allow dnl the ones we care about. m4_pattern_allow([^AM_[A-Z]+FLAGS$])dnl AC_REQUIRE([AM_SET_CURRENT_AUTOMAKE_VERSION])dnl AC_REQUIRE([AC_PROG_INSTALL])dnl # test to see if srcdir already configured if test "`cd $srcdir && pwd`" != "`pwd`" && test -f $srcdir/config.status; then AC_MSG_ERROR([source directory already configured; run "make distclean" there first]) fi # test whether we have cygpath if test -z "$CYGPATH_W"; then if (cygpath --version) >/dev/null 2>/dev/null; then CYGPATH_W='cygpath -w' else CYGPATH_W=echo fi fi AC_SUBST([CYGPATH_W]) # Define the identity of the package. dnl Distinguish between old-style and new-style calls. m4_ifval([$2], [m4_ifval([$3], [_AM_SET_OPTION([no-define])])dnl AC_SUBST([PACKAGE], [$1])dnl AC_SUBST([VERSION], [$2])], [_AM_SET_OPTIONS([$1])dnl AC_SUBST([PACKAGE], ['AC_PACKAGE_TARNAME'])dnl AC_SUBST([VERSION], ['AC_PACKAGE_VERSION'])])dnl _AM_IF_OPTION([no-define],, [AC_DEFINE_UNQUOTED(PACKAGE, "$PACKAGE", [Name of package]) AC_DEFINE_UNQUOTED(VERSION, "$VERSION", [Version number of package])])dnl # Some tools Automake needs. AC_REQUIRE([AM_SANITY_CHECK])dnl AC_REQUIRE([AC_ARG_PROGRAM])dnl AM_MISSING_PROG(ACLOCAL, aclocal-${am__api_version}) AM_MISSING_PROG(AUTOCONF, autoconf) AM_MISSING_PROG(AUTOMAKE, automake-${am__api_version}) AM_MISSING_PROG(AUTOHEADER, autoheader) AM_MISSING_PROG(MAKEINFO, makeinfo) AM_PROG_INSTALL_SH AM_PROG_INSTALL_STRIP AC_REQUIRE([AM_PROG_MKDIR_P])dnl # We need awk for the "check" target. The system "awk" is bad on # some platforms. 
AC_REQUIRE([AC_PROG_AWK])dnl AC_REQUIRE([AC_PROG_MAKE_SET])dnl AC_REQUIRE([AM_SET_LEADING_DOT])dnl _AM_IF_OPTION([tar-ustar], [_AM_PROG_TAR([ustar])], [_AM_IF_OPTION([tar-pax], [_AM_PROG_TAR([pax])], [_AM_PROG_TAR([v7])])]) _AM_IF_OPTION([no-dependencies],, [AC_PROVIDE_IFELSE([AC_PROG_CC], [_AM_DEPENDENCIES(CC)], [define([AC_PROG_CC], defn([AC_PROG_CC])[_AM_DEPENDENCIES(CC)])])dnl AC_PROVIDE_IFELSE([AC_PROG_CXX], [_AM_DEPENDENCIES(CXX)], [define([AC_PROG_CXX], defn([AC_PROG_CXX])[_AM_DEPENDENCIES(CXX)])])dnl ]) ]) # When config.status generates a header, we must update the stamp-h file. # This file resides in the same directory as the config header # that is generated. The stamp files are numbered to have different names. # Autoconf calls _AC_AM_CONFIG_HEADER_HOOK (when defined) in the # loop where config.status creates the headers, so we can generate # our stamp files there. AC_DEFUN([_AC_AM_CONFIG_HEADER_HOOK], [# Compute $1's index in $config_headers. _am_stamp_count=1 for _am_header in $config_headers :; do case $_am_header in $1 | $1:* ) break ;; * ) _am_stamp_count=`expr $_am_stamp_count + 1` ;; esac done echo "timestamp for $1" >`AS_DIRNAME([$1])`/stamp-h[]$_am_stamp_count]) # Copyright (C) 2001, 2003, 2005 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_PROG_INSTALL_SH # ------------------ # Define $install_sh. AC_DEFUN([AM_PROG_INSTALL_SH], [AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl install_sh=${install_sh-"$am_aux_dir/install-sh"} AC_SUBST(install_sh)]) # Copyright (C) 2003, 2005 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # serial 2 # Check whether the underlying file-system supports filenames # with a leading dot. For instance MS-DOS doesn't. AC_DEFUN([AM_SET_LEADING_DOT], [rm -rf .tst 2>/dev/null mkdir .tst 2>/dev/null if test -d .tst; then am__leading_dot=. else am__leading_dot=_ fi rmdir .tst 2>/dev/null AC_SUBST([am__leading_dot])]) # Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003, 2005 # Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # serial 5 # AM_PROG_LEX # ----------- # Autoconf leaves LEX=: if lex or flex can't be found. Change that to a # "missing" invocation, for better error output. AC_DEFUN([AM_PROG_LEX], [AC_PREREQ(2.50)dnl AC_REQUIRE([AM_MISSING_HAS_RUN])dnl AC_REQUIRE([AC_PROG_LEX])dnl if test "$LEX" = :; then LEX=${am_missing_run}flex fi]) # Check to see how 'make' treats includes. -*- Autoconf -*- # Copyright (C) 2001, 2002, 2003, 2005 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # serial 3 # AM_MAKE_INCLUDE() # ----------------- # Check to see how make treats includes. AC_DEFUN([AM_MAKE_INCLUDE], [am_make=${MAKE-make} cat > confinc << 'END' am__doit: @echo done .PHONY: am__doit END # If we don't find an include directive, just comment out the code. 
AC_MSG_CHECKING([for style of include used by $am_make]) am__include="#" am__quote= _am_result=none # First try GNU make style include. echo "include confinc" > confmf # We grep out `Entering directory' and `Leaving directory' # messages which can occur if `w' ends up in MAKEFLAGS. # In particular we don't look at `^make:' because GNU make might # be invoked under some other name (usually "gmake"), in which # case it prints its new name instead of `make'. if test "`$am_make -s -f confmf 2> /dev/null | grep -v 'ing directory'`" = "done"; then am__include=include am__quote= _am_result=GNU fi # Now try BSD make style include. if test "$am__include" = "#"; then echo '.include "confinc"' > confmf if test "`$am_make -s -f confmf 2> /dev/null`" = "done"; then am__include=.include am__quote="\"" _am_result=BSD fi fi AC_SUBST([am__include]) AC_SUBST([am__quote]) AC_MSG_RESULT([$_am_result]) rm -f confinc confmf ]) # Fake the existence of programs that GNU maintainers use. -*- Autoconf -*- # Copyright (C) 1997, 1999, 2000, 2001, 2003, 2005 # Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # serial 4 # AM_MISSING_PROG(NAME, PROGRAM) # ------------------------------ AC_DEFUN([AM_MISSING_PROG], [AC_REQUIRE([AM_MISSING_HAS_RUN]) $1=${$1-"${am_missing_run}$2"} AC_SUBST($1)]) # AM_MISSING_HAS_RUN # ------------------ # Define MISSING if not defined so far and test if it supports --run. # If it does, set am_missing_run to use it, otherwise, to nothing. AC_DEFUN([AM_MISSING_HAS_RUN], [AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl test x"${MISSING+set}" = xset || MISSING="\${SHELL} $am_aux_dir/missing" # Use eval to expand $SHELL if eval "$MISSING --run true"; then am_missing_run="$MISSING --run " else am_missing_run= AC_MSG_WARN([`missing' script is too old or missing]) fi ]) # Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_PROG_MKDIR_P # --------------- # Check whether `mkdir -p' is supported, fallback to mkinstalldirs otherwise. # # Automake 1.8 used `mkdir -m 0755 -p --' to ensure that directories # created by `make install' are always world readable, even if the # installer happens to have an overly restrictive umask (e.g. 077). # This was a mistake. There are at least two reasons why we must not # use `-m 0755': # - it causes special bits like SGID to be ignored, # - it may be too restrictive (some setups expect 775 directories). # # Do not use -m 0755 and let people choose whatever they expect by # setting umask. # # We cannot accept any implementation of `mkdir' that recognizes `-p'. # Some implementations (such as Solaris 8's) are not thread-safe: if a # parallel make tries to run `mkdir -p a/b' and `mkdir -p a/c' # concurrently, both version can detect that a/ is missing, but only # one can create it and the other will error out. Consequently we # restrict ourselves to GNU make (using the --version option ensures # this.) AC_DEFUN([AM_PROG_MKDIR_P], [if mkdir -p --version . >/dev/null 2>&1 && test ! -d ./--version; then # We used to keeping the `.' as first argument, in order to # allow $(mkdir_p) to be used without argument. As in # $(mkdir_p) $(somedir) # where $(somedir) is conditionally defined. 
However this is wrong # for two reasons: # 1. if the package is installed by a user who cannot write `.' # make install will fail, # 2. the above comment should most certainly read # $(mkdir_p) $(DESTDIR)$(somedir) # so it does not work when $(somedir) is undefined and # $(DESTDIR) is not. # To support the latter case, we have to write # test -z "$(somedir)" || $(mkdir_p) $(DESTDIR)$(somedir), # so the `.' trick is pointless. mkdir_p='mkdir -p --' else # On NextStep and OpenStep, the `mkdir' command does not # recognize any option. It will interpret all options as # directories to create, and then abort because `.' already # exists. for d in ./-p ./--version; do test -d $d && rmdir $d done # $(mkinstalldirs) is defined by Automake if mkinstalldirs exists. if test -f "$ac_aux_dir/mkinstalldirs"; then mkdir_p='$(mkinstalldirs)' else mkdir_p='$(install_sh) -d' fi fi AC_SUBST([mkdir_p])]) # Helper functions for option handling. -*- Autoconf -*- # Copyright (C) 2001, 2002, 2003, 2005 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # serial 3 # _AM_MANGLE_OPTION(NAME) # ----------------------- AC_DEFUN([_AM_MANGLE_OPTION], [[_AM_OPTION_]m4_bpatsubst($1, [[^a-zA-Z0-9_]], [_])]) # _AM_SET_OPTION(NAME) # ------------------------------ # Set option NAME. Presently that only means defining a flag for this option. AC_DEFUN([_AM_SET_OPTION], [m4_define(_AM_MANGLE_OPTION([$1]), 1)]) # _AM_SET_OPTIONS(OPTIONS) # ---------------------------------- # OPTIONS is a space-separated list of Automake options. AC_DEFUN([_AM_SET_OPTIONS], [AC_FOREACH([_AM_Option], [$1], [_AM_SET_OPTION(_AM_Option)])]) # _AM_IF_OPTION(OPTION, IF-SET, [IF-NOT-SET]) # ------------------------------------------- # Execute IF-SET if OPTION is set, IF-NOT-SET otherwise. AC_DEFUN([_AM_IF_OPTION], [m4_ifset(_AM_MANGLE_OPTION([$1]), [$2], [$3])]) # Check to make sure that the build environment is sane. -*- Autoconf -*- # Copyright (C) 1996, 1997, 2000, 2001, 2003, 2005 # Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # serial 4 # AM_SANITY_CHECK # --------------- AC_DEFUN([AM_SANITY_CHECK], [AC_MSG_CHECKING([whether build environment is sane]) # Just in case sleep 1 echo timestamp > conftest.file # Do `set' in a subshell so we don't clobber the current shell's # arguments. Must try -L first in case configure is actually a # symlink; some systems play weird games with the mod time of symlinks # (eg FreeBSD returns the mod time of the symlink's containing # directory). if ( set X `ls -Lt $srcdir/configure conftest.file 2> /dev/null` if test "$[*]" = "X"; then # -L didn't work. set X `ls -t $srcdir/configure conftest.file` fi rm -f conftest.file if test "$[*]" != "X $srcdir/configure conftest.file" \ && test "$[*]" != "X conftest.file $srcdir/configure"; then # If neither matched, then we have a broken ls. This can happen # if, for instance, CONFIG_SHELL is bash and it inherits a # broken ls alias from the environment. This has actually # happened. Such a system could not be considered "sane". AC_MSG_ERROR([ls -t appears to fail. Make sure there is not a broken alias in your environment]) fi test "$[2]" = conftest.file ) then # Ok. 
: else AC_MSG_ERROR([newly created file is older than distributed files! Check your system clock]) fi AC_MSG_RESULT(yes)]) # Copyright (C) 2001, 2003, 2005 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_PROG_INSTALL_STRIP # --------------------- # One issue with vendor `install' (even GNU) is that you can't # specify the program used to strip binaries. This is especially # annoying in cross-compiling environments, where the build's strip # is unlikely to handle the host's binaries. # Fortunately install-sh will honor a STRIPPROG variable, so we # always use install-sh in `make install-strip', and initialize # STRIPPROG with the value of the STRIP variable (set by the user). AC_DEFUN([AM_PROG_INSTALL_STRIP], [AC_REQUIRE([AM_PROG_INSTALL_SH])dnl # Installed binaries are usually stripped using `strip' when the user # run `make install-strip'. However `strip' might not be the right # tool to use in cross-compilation environments, therefore Automake # will honor the `STRIP' environment variable to overrule this program. dnl Don't test for $cross_compiling = yes, because it might be `maybe'. if test "$cross_compiling" != no; then AC_CHECK_TOOL([STRIP], [strip], :) fi INSTALL_STRIP_PROGRAM="\${SHELL} \$(install_sh) -c -s" AC_SUBST([INSTALL_STRIP_PROGRAM])]) # Check how to create a tarball. -*- Autoconf -*- # Copyright (C) 2004, 2005 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # serial 2 # _AM_PROG_TAR(FORMAT) # -------------------- # Check how to create a tarball in format FORMAT. # FORMAT should be one of `v7', `ustar', or `pax'. # # Substitute a variable $(am__tar) that is a command # writing to stdout a FORMAT-tarball containing the directory # $tardir. # tardir=directory && $(am__tar) > result.tar # # Substitute a variable $(am__untar) that extract such # a tarball read from stdin. # $(am__untar) < result.tar AC_DEFUN([_AM_PROG_TAR], [# Always define AMTAR for backward compatibility. AM_MISSING_PROG([AMTAR], [tar]) m4_if([$1], [v7], [am__tar='${AMTAR} chof - "$$tardir"'; am__untar='${AMTAR} xf -'], [m4_case([$1], [ustar],, [pax],, [m4_fatal([Unknown tar format])]) AC_MSG_CHECKING([how to create a $1 tar archive]) # Loop over all known methods to create a tar archive until one works. _am_tools='gnutar m4_if([$1], [ustar], [plaintar]) pax cpio none' _am_tools=${am_cv_prog_tar_$1-$_am_tools} # Do not fold the above two line into one, because Tru64 sh and # Solaris sh will not grok spaces in the rhs of `-'. for _am_tool in $_am_tools do case $_am_tool in gnutar) for _am_tar in tar gnutar gtar; do AM_RUN_LOG([$_am_tar --version]) && break done am__tar="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$$tardir"' am__tar_="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$tardir"' am__untar="$_am_tar -xf -" ;; plaintar) # Must skip GNU tar: if it does not support --format= it doesn't create # ustar tarball either. 
(tar --version) >/dev/null 2>&1 && continue am__tar='tar chf - "$$tardir"' am__tar_='tar chf - "$tardir"' am__untar='tar xf -' ;; pax) am__tar='pax -L -x $1 -w "$$tardir"' am__tar_='pax -L -x $1 -w "$tardir"' am__untar='pax -r' ;; cpio) am__tar='find "$$tardir" -print | cpio -o -H $1 -L' am__tar_='find "$tardir" -print | cpio -o -H $1 -L' am__untar='cpio -i -H $1 -d' ;; none) am__tar=false am__tar_=false am__untar=false ;; esac # If the value was cached, stop now. We just wanted to have am__tar # and am__untar set. test -n "${am_cv_prog_tar_$1}" && break # tar/untar a dummy directory, and stop if the command works rm -rf conftest.dir mkdir conftest.dir echo GrepMe > conftest.dir/file AM_RUN_LOG([tardir=conftest.dir && eval $am__tar_ >conftest.tar]) rm -rf conftest.dir if test -s conftest.tar; then AM_RUN_LOG([$am__untar /dev/null 2>&1 && break fi done rm -rf conftest.dir AC_CACHE_VAL([am_cv_prog_tar_$1], [am_cv_prog_tar_$1=$_am_tool]) AC_MSG_RESULT([$am_cv_prog_tar_$1])]) AC_SUBST([am__tar]) AC_SUBST([am__untar]) ]) # _AM_PROG_TAR ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/README.EXT0000644000000000000000000003305211672453175021404 0ustar rootroot$Id: README.EXT,v 1.5 2005/05/19 04:51:31 why Exp $ This is the documentation for libsyck and describes how to extend it. = Overview = Syck is designed to take a YAML stream and a symbol table and move data between the two. Your job is to simply provide callback functions which understand the symbol table you are keeping. Syck also includes a simple symbol table implementation. == About the Source == The Syck distribution is laid out as follows: lib/ libsyck source (core API) bytecode.re lexer for YAML bytecode (re2c) emitter.c emitter functions gram.y grammar for YAML documents (bison) handler.c internal handlers which glue the lexer and grammar implicit.re lexer for builtin YAML types (re2c) node.c node allocation and access syck.c parser funcs, central funcs syck.h libsyck definitions syck_st.c symbol table functions syck_st.h symbol table definitions token.re lexer for YAML plaintext (re2c) yaml2byte.c simple bytecode emitter ext/ ruby, python, php, cocoa extensions tests/ unit tests for libsyck YTS.c.rb generates YAML Testing Suite unit test (use: ruby YTS.c.rb > YTS.c) Basic.c allocation and buffering tests Parse.c parser sanity Emit.c emitter sanity == Using SyckNodes == The SyckNode is the structure which YAML data is loaded into while parsing. It's also a good structure to use while emitting, however you may choose to emit directly from your native types if your extension is very small. SyckNodes are designed to be used in conjunction with a symbol table. More on that in a moment. For now, think of a symbol table as a library which stores nodes, assigning each node a unique identifier. This identifier is called the SYMID in Syck. Nodes refer to each other by SYMIDs, rather than pointers. This way, the nodes can be free'd as the parser goes. To be honest, SYMIDs are used because this is the way Ruby works. And this technique means Syck can use Ruby's symbol table directly. But the included symbol table is lightweight, solves the problem of keeping too much data in memory, and simply pairs SYMIDs with your native object type (such as PyObject pointers.) Three kinds of SyckNodes are available: 1. scalar nodes (syck_str_kind): These nodes store a string, a length for the string and a style (indicating the format used in the YAML document). 2. sequence nodes (syck_seq_kind): Sequences are YAML's array or list type. 
These nodes store a list of items, which allocation is handled by syck functions. 3. mapping nodes (syck_map_kind): Mappings are YAML's dictionary or hashtable type. These nodes store a list of pairs, which allocation is handled by syck functions. The syck_kind_tag enum specifies the above enumerations, which can be tested against the SyckNode.kind field. PLEASE leave the SyckNode.shortcut field alone!! It's used by the parser to workaround parser ambiguities!! === Node API === SyckNode * syck_alloc_str() syck_alloc_seq() syck_alloc_str() Allocates a node of a given type and initializes its internal union to emptiness. When left as-is, these nodes operate as a valid empty string, empty sequence and empty map. Remember that the node's id (SYMID) isn't set by the allocation functions OR any other node functions herein. It's up to your handler function to do that. void syck_free_node( SyckNode *n ) While the Syck parser will free nodes it creates, use this to free your own nodes. This function will free all of its internals, its type_id and its anchor. If you don't need those members free, please be sure they are set to NULL. SyckNode * syck_new_str( char *str, enum scalar_style style ) syck_new_str2( char *str, long len, enum scalar_style style ) Creates scalar nodes from C strings. The first function will call strlen() to determine length. void syck_replace_str( SyckNode *n, char *str, enum scalar_style style ) syck_replace_str2( SyckNode *n, char *str, long len, enum scalar_style style ) Replaces the string content of a node `n', while keeping the node's type_id, anchor and id. char * syck_str_read( SyckNode *n ) Returns a pointer to the null-terminated string inside scalar node `n'. Normally, you might just want to use: char *ptr = n->data.str->ptr long len = n->data.str->len SyckNode * syck_new_map( SYMID key, SYMID value ) Allocates a new map with an initial pair of nodes. void syck_map_empty( SyckNode *n ) Empties the set of pairs for a mapping node. void syck_map_add( SyckNode *n, SYMID key, SYMID value ) Pushes a key-value pair on the mapping. While the ordering of pairs DOES affect the ordering of pairs on output, loaded nodes are deliberately out of order (since YAML mappings do not preserve ordering.) See YAML's builtin !omap type for ordering in mapping nodes. SYMID syck_map_read( SyckNode *n, enum map_part, long index ) Loads a specific key or value from position `index' within a mapping node. Great for iteration: for ( i = 0; i < syck_map_count( n ); i++ ) { SYMID key = sym_map_read( n, map_key, i ); SYMID val = sym_map_read( n, map_value, i ); } void syck_map_assign( SyckNode *n, enum map_part, long index, SYMID id ) Replaces a specific key or value at position `index' within a mapping node. Useful for replacement only, will not allocate more room when assigned beyond the end of the pair list. long syck_map_count( SyckNode *n ) Returns a count of the pairs contained by the mapping node. void syck_map_update( SyckNode *n, SyckNode *n2 ) Combines all pairs from mapping node `n2' into mapping node `n'. SyckNode * syck_new_seq( SYMID val ) Allocates a new seq with an entry `val'. void syck_seq_empty( SyckNode *n ) Empties a sequence node `n'. void syck_seq_add( SyckNode *n, SYMID val ) Pushes a new item `val' onto the end of the sequence. void syck_seq_assign( SyckNode *n, long index, SYMID val ) Replaces the item at position `index' in the sequence node with item `val'. Useful for replacement only, will not allocate more room when assigned beyond the end of the pair list. 
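As a quick aside, the node calls above can be combined as in the following minimal sketch. It is hypothetical glue, not part of libsyck itself: the literal SYMID values stand in for ids that your own symbol table would normally hand out, and scalar_plain is assumed to be one of the scalar_style values declared in syck.h.

    /* Build a scalar and a mapping with the node API, then walk the
     * pairs.  SYMIDs 1-4 are placeholders for symbol table entries. */
    #include <stdio.h>
    #include "syck.h"

    void
    node_example( void )
    {
        long i;
        SyckNode *title = syck_new_str( "a scalar node", scalar_plain );
        SyckNode *map   = syck_new_map( 1, 2 );   /* first pair: 1 => 2 */

        syck_map_add( map, 3, 4 );                /* push a second pair */
        for ( i = 0; i < syck_map_count( map ); i++ )
        {
            SYMID key = syck_map_read( map, map_key, i );
            SYMID val = syck_map_read( map, map_value, i );
            printf( "pair %ld: %ld => %ld\n", i, (long)key, (long)val );
        }

        printf( "scalar holds: %s\n", syck_str_read( title ) );
        syck_free_node( title );
        syck_free_node( map );
    }

Note that neither allocation call sets the nodes' own id field; as mentioned earlier, that remains the job of your handler.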
SYMID syck_seq_read( SyckNode *n, long index ) Reads the item at position `index' in the sequence node. Again, for iteration: for ( i = 0; i < syck_seq_count( n ); i++ ) { SYMID val = sym_seq_read( n, i ); } long syck_seq_count( SyckNode *n ) Returns a count of items contained by sequence node `n'. == YAML Parser == Syck's YAML parser is extremely simple. After setting up a SyckParser struct, along with callback functions for loading node data, use syck_parse() to start reading data. Since syck_parse() only reads single documents, the stream can be managed by calling syck_parse() repeatedly for an IO source. The parser has four callbacks: one for reading from the IO source, one for handling errors that show up, one for handling nodes as they come in, one for handling bad anchors in the document. Nodes are loaded in the order they appear in the YAML document, however nested nodes are loaded before their parent. === How to Write a Node Handler === Inside the node handler, the normal process should be: 1. Convert the SyckNode data to a structure meaningful to your application. 2. Check for the bad anchor caveat described in the next section. 3. Add the new structure to the symbol table attached to the parser. Found at parser->syms. 4. Return the SYMID reserved in the symbol table. === Nodes and Memory Allocation === One thing about SyckNodes passed into your handler: Syck WILL free the node once your handler is done with it. The node is temporary. So, if you plan on keeping a node around, you'll need to make yourself a new copy. And you'll probably need to reassign all the items in a sequence and pairs in a map. You can do this with syck_seq_assign() and syck_map_assign(). But, before you do that, you might consider using your own node structure that fits your application better. === A Note About Anchors in Parsing === YAML anchors can be recursive. This means deeper alias nodes can be loaded before the anchor. This is the trickiest part of the loading process. Assuming this YAML document: --- &a [*a] The loading process is: 1. Load alias *a by calling parser->bad_anchor_handler, which reserves a SYMID in the symbol table. 2. The `a' anchor is added to Syck's own anchor table, referencing the SYMID above. 3. When the anchor &a is found, the SyckNode created is given the SYMID of the bad anchor node above. (Usually nodes created at this stage have the `id' blank.) 4. The parser->handler function is called with that node. Check for node->id in the handler and overwrite the bad anchor node with the new node. === Parser API === See for layouts of SyckParser and SyckNode. SyckParser * syck_new_parser() Creates a new Syck parser. void syck_free_parser( SyckParser *p ) Frees the parser, as well as associated symbol tables and buffers. void syck_parser_implicit_typing( SyckParser *p, int on ) Toggles implicit typing of builtin YAML types. If this is passed a zero, YAML builtin types will be ignored (!int, !float, etc.) The default is 1. void syck_parser_taguri_expansion( SyckParser *p, int on ) Toggles expansion of types in full taguri. This defaults to 1 and is recommended to stay as 1. Turning this off removes a layer of abstraction that will cause incompatibilities between YAML documents of differing versions. void syck_parser_handler( SyckParser *p, SyckNodeHandler h ) Assign a callback function as a node handler. 
void
syck_parser_error_handler( SyckParser *p, SyckErrorHandler h )

    Assigns a callback function as the error handler. The SyckErrorHandler signature looks like this:

      void error_handler( SyckParser *p, char *str )

void
syck_parser_bad_anchor_handler( SyckParser *p, SyckBadAnchorHandler h )

    Assigns a callback function as the bad anchor handler. The SyckBadAnchorHandler signature looks like this:

      SyckNode *bad_anchor_handler( SyckParser *p, char *anchor )

void
syck_parser_file( SyckParser *p, FILE *f, SyckIoFileRead r )

    Assigns a FILE pointer as the IO source and a callback function which handles buffering of that IO source. The SyckIoFileRead signature looks like this:

      long SyckIoFileRead( char *buf, SyckIoFile *file, long max_size, long skip );

    Syck comes with a default FILE handler named `syck_io_file_read'. You can assign this default handler explicitly or by simply passing in NULL as the `r' parameter.

void
syck_parser_str( SyckParser *p, char *ptr, long len, SyckIoStrRead r )

    Assigns a string as the IO source with a callback function `r' which handles buffering of the string. The SyckIoStrRead signature looks like this:

      long SyckIoStrRead( char *buf, SyckIoStr *str, long max_size, long skip );

    Syck comes with a default string handler named `syck_io_str_read'. You can assign this default handler explicitly or by simply passing in NULL as the `r' parameter.

void
syck_parser_str_auto( SyckParser *p, char *ptr, SyckIoStrRead r )

    Same as the above, but uses strlen() to determine the string size.

SYMID
syck_parse( SyckParser *p )

    Parses a single document from the YAML stream, returning the SYMID for the root node.

== YAML Emitter ==

Since the YAML 0.50 release, Syck has featured a new emitter API. The idea here is to let Syck figure out shortcuts that will clean up output, detect builtin YAML types and -- especially -- determine the best way to format outgoing strings. The trick with the emitter is to learn its functions and let it do its job. If you don't like the formatting Syck is producing, please get in contact with the author and pitch your ideas!!

Like the YAML parser, the emitter has a couple of callbacks: namely, one for IO output and one for handling nodes. Nodes aren't necessarily SyckNodes. Since we're ultimately concerned with producing a string, SyckNodes become largely unnecessary.

=== The Emitter Process ===

1. Traverse the structure you will be emitting, registering all nodes with the emitter using syck_emitter_mark_node(). This step determines anchors and aliases in advance.
2. Call syck_emit() to begin emitting the root node.
3. Within your emitter handler, use the syck_emit_* convenience methods to build the document.
4. Call syck_emit_flush() to end the document and push the remaining output to the IO stream. Or continue to add documents to the output stream with syck_emit().

=== Emitter API ===

See syck.h for the layout of SyckEmitter.

SyckEmitter *
syck_new_emitter()

    Creates a new Syck emitter.

SYMID
syck_emitter_mark_node( SyckEmitter *e, st_data_t node )

    Adds an outgoing node to the symbol table, allocating an anchor for it if it is repeated in the document and scanning the type tag for auto-shortcut.

void
syck_output_handler( SyckEmitter *e, SyckOutputHandler out )

    Assigns a callback as the output handler. The SyckOutputHandler signature looks like this:

      void out_handler( SyckEmitter *e, char *ptr, long len );

    The handler receives the emitter object, a pointer to the buffer and a count of bytes which should be read from the buffer.
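As an illustration of the output callback just described, here is a minimal sketch (not from the original documentation) of an output handler that copies the emitter's buffer straight to stdout, assuming the void-returning signature shown above; the handler name is made up.

      #include <stdio.h>
      #include "syck.h"

      /* Writes `len' bytes from the emitter's buffer to stdout.
       * Register it with:  syck_output_handler( e, my_output_handler ); */
      void
      my_output_handler( SyckEmitter *e, char *ptr, long len )
      {
          (void)e;     /* no per-emitter state is needed to write to stdout */
          fwrite( ptr, 1, (size_t)len, stdout );
      }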
void syck_emitter_handler( SyckEmitter *e, SyckEmitterHandler void syck_free_emitter ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/config.status0000644000000000000000000011166211672453175022603 0ustar rootroot#! /bin/sh # Generated by configure. # Run this file to recreate the current configuration. # Compiler output produced by configure, useful for debugging # configure, is in config.log if it exists. debug=false ac_cs_recheck=false ac_cs_silent=false SHELL=${CONFIG_SHELL-/bin/sh} ## --------------------- ## ## M4sh Initialization. ## ## --------------------- ## # Be Bourne compatible if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then emulate sh NULLCMD=: # Zsh 3.x and 4.x performs word splitting on ${1+"$@"}, which # is contrary to our usage. Disable this feature. alias -g '${1+"$@"}'='"$@"' elif test -n "${BASH_VERSION+set}" && (set -o posix) >/dev/null 2>&1; then set -o posix fi DUALCASE=1; export DUALCASE # for MKS sh # Support unset when possible. if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then as_unset=unset else as_unset=false fi # Work around bugs in pre-3.0 UWIN ksh. $as_unset ENV MAIL MAILPATH PS1='$ ' PS2='> ' PS4='+ ' # NLS nuisances. for as_var in \ LANG LANGUAGE LC_ADDRESS LC_ALL LC_COLLATE LC_CTYPE LC_IDENTIFICATION \ LC_MEASUREMENT LC_MESSAGES LC_MONETARY LC_NAME LC_NUMERIC LC_PAPER \ LC_TELEPHONE LC_TIME do if (set +x; test -z "`(eval $as_var=C; export $as_var) 2>&1`"); then eval $as_var=C; export $as_var else $as_unset $as_var fi done # Required to use basename. if expr a : '\(a\)' >/dev/null 2>&1; then as_expr=expr else as_expr=false fi if (basename /) >/dev/null 2>&1 && test "X`basename / 2>&1`" = "X/"; then as_basename=basename else as_basename=false fi # Name of the executable. as_me=`$as_basename "$0" || $as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ X"$0" : 'X\(//\)$' \| \ X"$0" : 'X\(/\)$' \| \ . : '\(.\)' 2>/dev/null || echo X/"$0" | sed '/^.*\/\([^/][^/]*\)\/*$/{ s//\1/; q; } /^X\/\(\/\/\)$/{ s//\1/; q; } /^X\/\(\/\).*/{ s//\1/; q; } s/.*/./; q'` # PATH needs CR, and LINENO needs CR and PATH. # Avoid depending upon Character Ranges. as_cr_letters='abcdefghijklmnopqrstuvwxyz' as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' as_cr_Letters=$as_cr_letters$as_cr_LETTERS as_cr_digits='0123456789' as_cr_alnum=$as_cr_Letters$as_cr_digits # The user is always right. if test "${PATH_SEPARATOR+set}" != set; then echo "#! /bin/sh" >conf$$.sh echo "exit 0" >>conf$$.sh chmod +x conf$$.sh if (PATH="/nonexistent;."; conf$$.sh) >/dev/null 2>&1; then PATH_SEPARATOR=';' else PATH_SEPARATOR=: fi rm -f conf$$.sh fi as_lineno_1=$LINENO as_lineno_2=$LINENO as_lineno_3=`(expr $as_lineno_1 + 1) 2>/dev/null` test "x$as_lineno_1" != "x$as_lineno_2" && test "x$as_lineno_3" = "x$as_lineno_2" || { # Find who we are. Look in the path if we contain no path at all # relative or not. case $0 in *[\\/]* ) as_myself=$0 ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break done ;; esac # We did not find ourselves, most probably we were run as `sh COMMAND' # in which case we are not to be found in the path. if test "x$as_myself" = x; then as_myself=$0 fi if test ! 
-f "$as_myself"; then { { echo "$as_me:$LINENO: error: cannot find myself; rerun with an absolute path" >&5 echo "$as_me: error: cannot find myself; rerun with an absolute path" >&2;} { (exit 1); exit 1; }; } fi case $CONFIG_SHELL in '') as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for as_base in sh bash ksh sh5; do case $as_dir in /*) if ("$as_dir/$as_base" -c ' as_lineno_1=$LINENO as_lineno_2=$LINENO as_lineno_3=`(expr $as_lineno_1 + 1) 2>/dev/null` test "x$as_lineno_1" != "x$as_lineno_2" && test "x$as_lineno_3" = "x$as_lineno_2" ') 2>/dev/null; then $as_unset BASH_ENV || test "${BASH_ENV+set}" != set || { BASH_ENV=; export BASH_ENV; } $as_unset ENV || test "${ENV+set}" != set || { ENV=; export ENV; } CONFIG_SHELL=$as_dir/$as_base export CONFIG_SHELL exec "$CONFIG_SHELL" "$0" ${1+"$@"} fi;; esac done done ;; esac # Create $as_me.lineno as a copy of $as_myself, but with $LINENO # uniformly replaced by the line number. The first 'sed' inserts a # line-number line before each line; the second 'sed' does the real # work. The second script uses 'N' to pair each line-number line # with the numbered line, and appends trailing '-' during # substitution so that $LINENO is not a special case at line end. # (Raja R Harinath suggested sed '=', and Paul Eggert wrote the # second 'sed' script. Blame Lee E. McMahon for sed's syntax. :-) sed '=' <$as_myself | sed ' N s,$,-, : loop s,^\(['$as_cr_digits']*\)\(.*\)[$]LINENO\([^'$as_cr_alnum'_]\),\1\2\1\3, t loop s,-$,, s,^['$as_cr_digits']*\n,, ' >$as_me.lineno && chmod +x $as_me.lineno || { { echo "$as_me:$LINENO: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&5 echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2;} { (exit 1); exit 1; }; } # Don't try to exec as it changes $[0], causing all sort of problems # (the dirname of $[0] is not the place where we might find the # original and so on. Autoconf is especially sensible to this). . ./$as_me.lineno # Exit status is that of the last command. exit } case `echo "testing\c"; echo 1,2,3`,`echo -n testing; echo 1,2,3` in *c*,-n*) ECHO_N= ECHO_C=' ' ECHO_T=' ' ;; *c*,* ) ECHO_N=-n ECHO_C= ECHO_T= ;; *) ECHO_N= ECHO_C='\c' ECHO_T= ;; esac if expr a : '\(a\)' >/dev/null 2>&1; then as_expr=expr else as_expr=false fi rm -f conf$$ conf$$.exe conf$$.file echo >conf$$.file if ln -s conf$$.file conf$$ 2>/dev/null; then # We could just check for DJGPP; but this test a) works b) is more generic # and c) will remain valid once DJGPP supports symlinks (DJGPP 2.04). if test -f conf$$.exe; then # Don't use ln at all; we don't have any links as_ln_s='cp -p' else as_ln_s='ln -s' fi elif ln conf$$.file conf$$ 2>/dev/null; then as_ln_s=ln else as_ln_s='cp -p' fi rm -f conf$$ conf$$.exe conf$$.file if mkdir -p . 2>/dev/null; then as_mkdir_p=: else test -d ./-p && rmdir ./-p as_mkdir_p=false fi as_executable_p="test -f" # Sed expression to map a string onto a valid CPP name. as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" # Sed expression to map a string onto a valid variable name. as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" # IFS # We need space, tab and new line, in precisely that order. as_nl=' ' IFS=" $as_nl" # CDPATH. $as_unset CDPATH exec 6>&1 # Open the log real soon, to keep \$[0] and so on meaningful, and to # report actual input values of CONFIG_FILES etc. instead of their # values after options handling. Logging --version etc. 
is OK. exec 5>>config.log { echo sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX ## Running $as_me. ## _ASBOX } >&5 cat >&5 <<_CSEOF This file was extended by syck $as_me 0.54, which was generated by GNU Autoconf 2.59. Invocation command line was CONFIG_FILES = $CONFIG_FILES CONFIG_HEADERS = $CONFIG_HEADERS CONFIG_LINKS = $CONFIG_LINKS CONFIG_COMMANDS = $CONFIG_COMMANDS $ $0 $@ _CSEOF echo "on `(hostname || uname -n) 2>/dev/null | sed 1q`" >&5 echo >&5 config_files=" Makefile lib/Makefile tests/Makefile" config_headers=" config.h" config_commands=" depfiles" ac_cs_usage="\ \`$as_me' instantiates files from templates according to the current configuration. Usage: $0 [OPTIONS] [FILE]... -h, --help print this help, then exit -V, --version print version number, then exit -q, --quiet do not print progress messages -d, --debug don't remove temporary files --recheck update $as_me by reconfiguring in the same conditions --file=FILE[:TEMPLATE] instantiate the configuration file FILE --header=FILE[:TEMPLATE] instantiate the configuration header FILE Configuration files: $config_files Configuration headers: $config_headers Configuration commands: $config_commands Report bugs to ." ac_cs_version="\ syck config.status 0.54 configured by ./configure, generated by GNU Autoconf 2.59, with options \"\" Copyright (C) 2003 Free Software Foundation, Inc. This config.status script is free software; the Free Software Foundation gives unlimited permission to copy, distribute and modify it." srcdir=. INSTALL="/usr/bin/install -c" # If no file are specified by the user, then we need to provide default # value. By we need to know if files were specified by the user. ac_need_defaults=: while test $# != 0 do case $1 in --*=*) ac_option=`expr "x$1" : 'x\([^=]*\)='` ac_optarg=`expr "x$1" : 'x[^=]*=\(.*\)'` ac_shift=: ;; -*) ac_option=$1 ac_optarg=$2 ac_shift=shift ;; *) # This is not an option, so the user has probably given explicit # arguments. ac_option=$1 ac_need_defaults=false;; esac case $ac_option in # Handling of the options. -recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r) ac_cs_recheck=: ;; --version | --vers* | -V ) echo "$ac_cs_version"; exit 0 ;; --he | --h) # Conflict between --help and --header { { echo "$as_me:$LINENO: error: ambiguous option: $1 Try \`$0 --help' for more information." >&5 echo "$as_me: error: ambiguous option: $1 Try \`$0 --help' for more information." >&2;} { (exit 1); exit 1; }; };; --help | --hel | -h ) echo "$ac_cs_usage"; exit 0 ;; --debug | --d* | -d ) debug=: ;; --file | --fil | --fi | --f ) $ac_shift CONFIG_FILES="$CONFIG_FILES $ac_optarg" ac_need_defaults=false;; --header | --heade | --head | --hea ) $ac_shift CONFIG_HEADERS="$CONFIG_HEADERS $ac_optarg" ac_need_defaults=false;; -q | -quiet | --quiet | --quie | --qui | --qu | --q \ | -silent | --silent | --silen | --sile | --sil | --si | --s) ac_cs_silent=: ;; # This is an error. -*) { { echo "$as_me:$LINENO: error: unrecognized option: $1 Try \`$0 --help' for more information." >&5 echo "$as_me: error: unrecognized option: $1 Try \`$0 --help' for more information." 
>&2;} { (exit 1); exit 1; }; } ;; *) ac_config_targets="$ac_config_targets $1" ;; esac shift done ac_configure_extra_args= if $ac_cs_silent; then exec 6>/dev/null ac_configure_extra_args="$ac_configure_extra_args --silent" fi if $ac_cs_recheck; then echo "running /bin/sh ./configure " $ac_configure_extra_args " --no-create --no-recursion" >&6 exec /bin/sh ./configure $ac_configure_extra_args --no-create --no-recursion fi # # INIT-COMMANDS section. # AMDEP_TRUE="" ac_aux_dir="config" for ac_config_target in $ac_config_targets do case "$ac_config_target" in # Handling of arguments. "Makefile" ) CONFIG_FILES="$CONFIG_FILES Makefile" ;; "lib/Makefile" ) CONFIG_FILES="$CONFIG_FILES lib/Makefile" ;; "tests/Makefile" ) CONFIG_FILES="$CONFIG_FILES tests/Makefile" ;; "depfiles" ) CONFIG_COMMANDS="$CONFIG_COMMANDS depfiles" ;; "config.h" ) CONFIG_HEADERS="$CONFIG_HEADERS config.h" ;; *) { { echo "$as_me:$LINENO: error: invalid argument: $ac_config_target" >&5 echo "$as_me: error: invalid argument: $ac_config_target" >&2;} { (exit 1); exit 1; }; };; esac done # If the user did not use the arguments to specify the items to instantiate, # then the envvar interface is used. Set only those that are not. # We use the long form for the default assignment because of an extremely # bizarre bug on SunOS 4.1.3. if $ac_need_defaults; then test "${CONFIG_FILES+set}" = set || CONFIG_FILES=$config_files test "${CONFIG_HEADERS+set}" = set || CONFIG_HEADERS=$config_headers test "${CONFIG_COMMANDS+set}" = set || CONFIG_COMMANDS=$config_commands fi # Have a temporary directory for convenience. Make it in the build tree # simply because there is no reason to put it here, and in addition, # creating and moving files from /tmp can sometimes cause problems. # Create a temporary directory, and hook for its removal unless debugging. $debug || { trap 'exit_status=$?; rm -rf $tmp && exit $exit_status' 0 trap '{ (exit 1); exit 1; }' 1 2 13 15 } # Create a (secure) tmp directory for tmp files. { tmp=`(umask 077 && mktemp -d -q "./confstatXXXXXX") 2>/dev/null` && test -n "$tmp" && test -d "$tmp" } || { tmp=./confstat$$-$RANDOM (umask 077 && mkdir $tmp) } || { echo "$me: cannot create a temporary directory in ." >&2 { (exit 1); exit 1; } } # # CONFIG_FILES section. # # No need to generate the scripts if there are no CONFIG_FILES. # This happens for instance when ./config.status config.h if test -n "$CONFIG_FILES"; then # Protect against being on the right side of a sed subst in config.status. 
sed 's/,@/@@/; s/@,/@@/; s/,;t t$/@;t t/; /@;t t$/s/[\\&,]/\\&/g; s/@@/,@/; s/@@/@,/; s/@;t t$/,;t t/' >$tmp/subs.sed <<\CEOF s,@SHELL@,/bin/sh,;t t s,@PATH_SEPARATOR@,:,;t t s,@PACKAGE_NAME@,syck,;t t s,@PACKAGE_TARNAME@,syck,;t t s,@PACKAGE_VERSION@,0.54,;t t s,@PACKAGE_STRING@,syck 0.54,;t t s,@PACKAGE_BUGREPORT@,,;t t s,@exec_prefix@,${prefix},;t t s,@prefix@,/usr/local,;t t s,@program_transform_name@,s,x,x,,;t t s,@bindir@,${exec_prefix}/bin,;t t s,@sbindir@,${exec_prefix}/sbin,;t t s,@libexecdir@,${exec_prefix}/libexec,;t t s,@datadir@,${prefix}/share,;t t s,@sysconfdir@,${prefix}/etc,;t t s,@sharedstatedir@,${prefix}/com,;t t s,@localstatedir@,${prefix}/var,;t t s,@libdir@,${exec_prefix}/lib,;t t s,@includedir@,${prefix}/include,;t t s,@oldincludedir@,/usr/include,;t t s,@infodir@,${prefix}/info,;t t s,@mandir@,${prefix}/man,;t t s,@build_alias@,,;t t s,@host_alias@,,;t t s,@target_alias@,,;t t s,@DEFS@,-DHAVE_CONFIG_H,;t t s,@ECHO_C@,,;t t s,@ECHO_N@,-n,;t t s,@ECHO_T@,,;t t s,@LIBS@,,;t t s,@INSTALL_PROGRAM@,${INSTALL},;t t s,@INSTALL_SCRIPT@,${INSTALL},;t t s,@INSTALL_DATA@,${INSTALL} -m 644,;t t s,@CYGPATH_W@,echo,;t t s,@PACKAGE@,syck,;t t s,@VERSION@,0.54,;t t s,@ACLOCAL@,${SHELL} /home/why/sand/syck-0.55/config/missing --run aclocal-1.9,;t t s,@AUTOCONF@,${SHELL} /home/why/sand/syck-0.55/config/missing --run autoconf,;t t s,@AUTOMAKE@,${SHELL} /home/why/sand/syck-0.55/config/missing --run automake-1.9,;t t s,@AUTOHEADER@,${SHELL} /home/why/sand/syck-0.55/config/missing --run autoheader,;t t s,@MAKEINFO@,${SHELL} /home/why/sand/syck-0.55/config/missing --run makeinfo,;t t s,@install_sh@,/home/why/sand/syck-0.55/config/install-sh,;t t s,@STRIP@,,;t t s,@ac_ct_STRIP@,,;t t s,@INSTALL_STRIP_PROGRAM@,${SHELL} $(install_sh) -c -s,;t t s,@mkdir_p@,mkdir -p --,;t t s,@AWK@,gawk,;t t s,@SET_MAKE@,,;t t s,@am__leading_dot@,.,;t t s,@AMTAR@,${SHELL} /home/why/sand/syck-0.55/config/missing --run tar,;t t s,@am__tar@,${AMTAR} chof - "$$tardir",;t t s,@am__untar@,${AMTAR} xf -,;t t s,@LN_S@,ln -s,;t t s,@RANLIB@,ranlib,;t t s,@ac_ct_RANLIB@,ranlib,;t t s,@YACC@,bison -y,;t t s,@LEX@,flex,;t t s,@CC@,gcc,;t t s,@CFLAGS@,-g -O2,;t t s,@LDFLAGS@,,;t t s,@CPPFLAGS@,,;t t s,@ac_ct_CC@,gcc,;t t s,@EXEEXT@,,;t t s,@OBJEXT@,o,;t t s,@DEPDIR@,.deps,;t t s,@am__include@,include,;t t s,@am__quote@,,;t t s,@AMDEP_TRUE@,,;t t s,@AMDEP_FALSE@,#,;t t s,@AMDEPBACKSLASH@,\,;t t s,@CCDEPMODE@,depmode=gcc3,;t t s,@am__fastdepCC_TRUE@,,;t t s,@am__fastdepCC_FALSE@,#,;t t s,@LEXLIB@,-lfl,;t t s,@LEX_OUTPUT_ROOT@,lex.yy,;t t s,@CPP@,gcc -E,;t t s,@EGREP@,grep -E,;t t s,@LIBOBJS@,,;t t s,@LTLIBOBJS@,,;t t CEOF # Split the substitutions into bite-sized pieces for seds with # small command number limits, like on Digital OSF/1 and HP-UX. ac_max_sed_lines=48 ac_sed_frag=1 # Number of current file. ac_beg=1 # First line for current file. ac_end=$ac_max_sed_lines # Line after last line for current file. ac_more_lines=: ac_sed_cmds= while $ac_more_lines; do if test $ac_beg -gt 1; then sed "1,${ac_beg}d; ${ac_end}q" $tmp/subs.sed >$tmp/subs.frag else sed "${ac_end}q" $tmp/subs.sed >$tmp/subs.frag fi if test ! -s $tmp/subs.frag; then ac_more_lines=false else # The purpose of the label and of the branching condition is to # speed up the sed processing (if there are no `@' at all, there # is no need to browse any of the substitutions). # These are the two extra sed commands mentioned above. 
(echo ':t /@[a-zA-Z_][a-zA-Z_0-9]*@/!b' && cat $tmp/subs.frag) >$tmp/subs-$ac_sed_frag.sed if test -z "$ac_sed_cmds"; then ac_sed_cmds="sed -f $tmp/subs-$ac_sed_frag.sed" else ac_sed_cmds="$ac_sed_cmds | sed -f $tmp/subs-$ac_sed_frag.sed" fi ac_sed_frag=`expr $ac_sed_frag + 1` ac_beg=$ac_end ac_end=`expr $ac_end + $ac_max_sed_lines` fi done if test -z "$ac_sed_cmds"; then ac_sed_cmds=cat fi fi # test -n "$CONFIG_FILES" for ac_file in : $CONFIG_FILES; do test "x$ac_file" = x: && continue # Support "outfile[:infile[:infile...]]", defaulting infile="outfile.in". case $ac_file in - | *:- | *:-:* ) # input from stdin cat >$tmp/stdin ac_file_in=`echo "$ac_file" | sed 's,[^:]*:,,'` ac_file=`echo "$ac_file" | sed 's,:.*,,'` ;; *:* ) ac_file_in=`echo "$ac_file" | sed 's,[^:]*:,,'` ac_file=`echo "$ac_file" | sed 's,:.*,,'` ;; * ) ac_file_in=$ac_file.in ;; esac # Compute @srcdir@, @top_srcdir@, and @INSTALL@ for subdirectories. ac_dir=`(dirname "$ac_file") 2>/dev/null || $as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$ac_file" : 'X\(//\)[^/]' \| \ X"$ac_file" : 'X\(//\)$' \| \ X"$ac_file" : 'X\(/\)' \| \ . : '\(.\)' 2>/dev/null || echo X"$ac_file" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } /^X\(\/\/\)[^/].*/{ s//\1/; q; } /^X\(\/\/\)$/{ s//\1/; q; } /^X\(\/\).*/{ s//\1/; q; } s/.*/./; q'` { if $as_mkdir_p; then mkdir -p "$ac_dir" else as_dir="$ac_dir" as_dirs= while test ! -d "$as_dir"; do as_dirs="$as_dir $as_dirs" as_dir=`(dirname "$as_dir") 2>/dev/null || $as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$as_dir" : 'X\(//\)[^/]' \| \ X"$as_dir" : 'X\(//\)$' \| \ X"$as_dir" : 'X\(/\)' \| \ . : '\(.\)' 2>/dev/null || echo X"$as_dir" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } /^X\(\/\/\)[^/].*/{ s//\1/; q; } /^X\(\/\/\)$/{ s//\1/; q; } /^X\(\/\).*/{ s//\1/; q; } s/.*/./; q'` done test ! -n "$as_dirs" || mkdir $as_dirs fi || { { echo "$as_me:$LINENO: error: cannot create directory \"$ac_dir\"" >&5 echo "$as_me: error: cannot create directory \"$ac_dir\"" >&2;} { (exit 1); exit 1; }; }; } ac_builddir=. if test "$ac_dir" != .; then ac_dir_suffix=/`echo "$ac_dir" | sed 's,^\.[\\/],,'` # A "../" for each directory in $ac_dir_suffix. ac_top_builddir=`echo "$ac_dir_suffix" | sed 's,/[^\\/]*,../,g'` else ac_dir_suffix= ac_top_builddir= fi case $srcdir in .) # No --srcdir option. We are building in place. ac_srcdir=. if test -z "$ac_top_builddir"; then ac_top_srcdir=. else ac_top_srcdir=`echo $ac_top_builddir | sed 's,/$,,'` fi ;; [\\/]* | ?:[\\/]* ) # Absolute path. ac_srcdir=$srcdir$ac_dir_suffix; ac_top_srcdir=$srcdir ;; *) # Relative path. ac_srcdir=$ac_top_builddir$srcdir$ac_dir_suffix ac_top_srcdir=$ac_top_builddir$srcdir ;; esac # Do not use `cd foo && pwd` to compute absolute paths, because # the directories may not exist. case `pwd` in .) ac_abs_builddir="$ac_dir";; *) case "$ac_dir" in .) ac_abs_builddir=`pwd`;; [\\/]* | ?:[\\/]* ) ac_abs_builddir="$ac_dir";; *) ac_abs_builddir=`pwd`/"$ac_dir";; esac;; esac case $ac_abs_builddir in .) ac_abs_top_builddir=${ac_top_builddir}.;; *) case ${ac_top_builddir}. in .) ac_abs_top_builddir=$ac_abs_builddir;; [\\/]* | ?:[\\/]* ) ac_abs_top_builddir=${ac_top_builddir}.;; *) ac_abs_top_builddir=$ac_abs_builddir/${ac_top_builddir}.;; esac;; esac case $ac_abs_builddir in .) ac_abs_srcdir=$ac_srcdir;; *) case $ac_srcdir in .) ac_abs_srcdir=$ac_abs_builddir;; [\\/]* | ?:[\\/]* ) ac_abs_srcdir=$ac_srcdir;; *) ac_abs_srcdir=$ac_abs_builddir/$ac_srcdir;; esac;; esac case $ac_abs_builddir in .) 
ac_abs_top_srcdir=$ac_top_srcdir;; *) case $ac_top_srcdir in .) ac_abs_top_srcdir=$ac_abs_builddir;; [\\/]* | ?:[\\/]* ) ac_abs_top_srcdir=$ac_top_srcdir;; *) ac_abs_top_srcdir=$ac_abs_builddir/$ac_top_srcdir;; esac;; esac case $INSTALL in [\\/$]* | ?:[\\/]* ) ac_INSTALL=$INSTALL ;; *) ac_INSTALL=$ac_top_builddir$INSTALL ;; esac if test x"$ac_file" != x-; then { echo "$as_me:$LINENO: creating $ac_file" >&5 echo "$as_me: creating $ac_file" >&6;} rm -f "$ac_file" fi # Let's still pretend it is `configure' which instantiates (i.e., don't # use $as_me), people would be surprised to read: # /* config.h. Generated by config.status. */ if test x"$ac_file" = x-; then configure_input= else configure_input="$ac_file. " fi configure_input=$configure_input"Generated from `echo $ac_file_in | sed 's,.*/,,'` by configure." # First look for the input files in the build tree, otherwise in the # src tree. ac_file_inputs=`IFS=: for f in $ac_file_in; do case $f in -) echo $tmp/stdin ;; [\\/$]*) # Absolute (can't be DOS-style, as IFS=:) test -f "$f" || { { echo "$as_me:$LINENO: error: cannot find input file: $f" >&5 echo "$as_me: error: cannot find input file: $f" >&2;} { (exit 1); exit 1; }; } echo "$f";; *) # Relative if test -f "$f"; then # Build tree echo "$f" elif test -f "$srcdir/$f"; then # Source tree echo "$srcdir/$f" else # /dev/null tree { { echo "$as_me:$LINENO: error: cannot find input file: $f" >&5 echo "$as_me: error: cannot find input file: $f" >&2;} { (exit 1); exit 1; }; } fi;; esac done` || { (exit 1); exit 1; } sed "/^[ ]*VPATH[ ]*=/{ s/:*\$(srcdir):*/:/; s/:*\${srcdir}:*/:/; s/:*@srcdir@:*/:/; s/^\([^=]*=[ ]*\):*/\1/; s/:*$//; s/^[^=]*=[ ]*$//; } :t /@[a-zA-Z_][a-zA-Z_0-9]*@/!b s,@configure_input@,$configure_input,;t t s,@srcdir@,$ac_srcdir,;t t s,@abs_srcdir@,$ac_abs_srcdir,;t t s,@top_srcdir@,$ac_top_srcdir,;t t s,@abs_top_srcdir@,$ac_abs_top_srcdir,;t t s,@builddir@,$ac_builddir,;t t s,@abs_builddir@,$ac_abs_builddir,;t t s,@top_builddir@,$ac_top_builddir,;t t s,@abs_top_builddir@,$ac_abs_top_builddir,;t t s,@INSTALL@,$ac_INSTALL,;t t " $ac_file_inputs | (eval "$ac_sed_cmds") >$tmp/out rm -f $tmp/stdin if test x"$ac_file" != x-; then mv $tmp/out $ac_file else cat $tmp/out rm -f $tmp/out fi done # # CONFIG_HEADER section. # # These sed commands are passed to sed as "A NAME B NAME C VALUE D", where # NAME is the cpp macro being defined and VALUE is the value it is being given. # # ac_d sets the value in "#define NAME VALUE" lines. ac_dA='s,^\([ ]*\)#\([ ]*define[ ][ ]*\)' ac_dB='[ ].*$,\1#\2' ac_dC=' ' ac_dD=',;t' # ac_u turns "#undef NAME" without trailing blanks into "#define NAME VALUE". ac_uA='s,^\([ ]*\)#\([ ]*\)undef\([ ][ ]*\)' ac_uB='$,\1#\2define\3' ac_uC=' ' ac_uD=',;t' for ac_file in : $CONFIG_HEADERS; do test "x$ac_file" = x: && continue # Support "outfile[:infile[:infile...]]", defaulting infile="outfile.in". case $ac_file in - | *:- | *:-:* ) # input from stdin cat >$tmp/stdin ac_file_in=`echo "$ac_file" | sed 's,[^:]*:,,'` ac_file=`echo "$ac_file" | sed 's,:.*,,'` ;; *:* ) ac_file_in=`echo "$ac_file" | sed 's,[^:]*:,,'` ac_file=`echo "$ac_file" | sed 's,:.*,,'` ;; * ) ac_file_in=$ac_file.in ;; esac test x"$ac_file" != x- && { echo "$as_me:$LINENO: creating $ac_file" >&5 echo "$as_me: creating $ac_file" >&6;} # First look for the input files in the build tree, otherwise in the # src tree. 
ac_file_inputs=`IFS=: for f in $ac_file_in; do case $f in -) echo $tmp/stdin ;; [\\/$]*) # Absolute (can't be DOS-style, as IFS=:) test -f "$f" || { { echo "$as_me:$LINENO: error: cannot find input file: $f" >&5 echo "$as_me: error: cannot find input file: $f" >&2;} { (exit 1); exit 1; }; } # Do quote $f, to prevent DOS paths from being IFS'd. echo "$f";; *) # Relative if test -f "$f"; then # Build tree echo "$f" elif test -f "$srcdir/$f"; then # Source tree echo "$srcdir/$f" else # /dev/null tree { { echo "$as_me:$LINENO: error: cannot find input file: $f" >&5 echo "$as_me: error: cannot find input file: $f" >&2;} { (exit 1); exit 1; }; } fi;; esac done` || { (exit 1); exit 1; } # Remove the trailing spaces. sed 's/[ ]*$//' $ac_file_inputs >$tmp/in # Handle all the #define templates only if necessary. if grep "^[ ]*#[ ]*define" $tmp/in >/dev/null; then # If there are no defines, we may have an empty if/fi : cat >$tmp/defines.sed <$tmp/out rm -f $tmp/in mv $tmp/out $tmp/in fi # grep # Handle all the #undef templates cat >$tmp/undefs.sed <$tmp/out rm -f $tmp/in mv $tmp/out $tmp/in # Let's still pretend it is `configure' which instantiates (i.e., don't # use $as_me), people would be surprised to read: # /* config.h. Generated by config.status. */ if test x"$ac_file" = x-; then echo "/* Generated by configure. */" >$tmp/config.h else echo "/* $ac_file. Generated by configure. */" >$tmp/config.h fi cat $tmp/in >>$tmp/config.h rm -f $tmp/in if test x"$ac_file" != x-; then if diff $ac_file $tmp/config.h >/dev/null 2>&1; then { echo "$as_me:$LINENO: $ac_file is unchanged" >&5 echo "$as_me: $ac_file is unchanged" >&6;} else ac_dir=`(dirname "$ac_file") 2>/dev/null || $as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$ac_file" : 'X\(//\)[^/]' \| \ X"$ac_file" : 'X\(//\)$' \| \ X"$ac_file" : 'X\(/\)' \| \ . : '\(.\)' 2>/dev/null || echo X"$ac_file" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } /^X\(\/\/\)[^/].*/{ s//\1/; q; } /^X\(\/\/\)$/{ s//\1/; q; } /^X\(\/\).*/{ s//\1/; q; } s/.*/./; q'` { if $as_mkdir_p; then mkdir -p "$ac_dir" else as_dir="$ac_dir" as_dirs= while test ! -d "$as_dir"; do as_dirs="$as_dir $as_dirs" as_dir=`(dirname "$as_dir") 2>/dev/null || $as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$as_dir" : 'X\(//\)[^/]' \| \ X"$as_dir" : 'X\(//\)$' \| \ X"$as_dir" : 'X\(/\)' \| \ . : '\(.\)' 2>/dev/null || echo X"$as_dir" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } /^X\(\/\/\)[^/].*/{ s//\1/; q; } /^X\(\/\/\)$/{ s//\1/; q; } /^X\(\/\).*/{ s//\1/; q; } s/.*/./; q'` done test ! -n "$as_dirs" || mkdir $as_dirs fi || { { echo "$as_me:$LINENO: error: cannot create directory \"$ac_dir\"" >&5 echo "$as_me: error: cannot create directory \"$ac_dir\"" >&2;} { (exit 1); exit 1; }; }; } rm -f $ac_file mv $tmp/config.h $ac_file fi else cat $tmp/config.h rm -f $tmp/config.h fi # Compute $ac_file's index in $config_headers. _am_stamp_count=1 for _am_header in $config_headers :; do case $_am_header in $ac_file | $ac_file:* ) break ;; * ) _am_stamp_count=`expr $_am_stamp_count + 1` ;; esac done echo "timestamp for $ac_file" >`(dirname $ac_file) 2>/dev/null || $as_expr X$ac_file : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X$ac_file : 'X\(//\)[^/]' \| \ X$ac_file : 'X\(//\)$' \| \ X$ac_file : 'X\(/\)' \| \ . : '\(.\)' 2>/dev/null || echo X$ac_file | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } /^X\(\/\/\)[^/].*/{ s//\1/; q; } /^X\(\/\/\)$/{ s//\1/; q; } /^X\(\/\).*/{ s//\1/; q; } s/.*/./; q'`/stamp-h$_am_stamp_count done # # CONFIG_COMMANDS section. 
# for ac_file in : $CONFIG_COMMANDS; do test "x$ac_file" = x: && continue ac_dest=`echo "$ac_file" | sed 's,:.*,,'` ac_source=`echo "$ac_file" | sed 's,[^:]*:,,'` ac_dir=`(dirname "$ac_dest") 2>/dev/null || $as_expr X"$ac_dest" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$ac_dest" : 'X\(//\)[^/]' \| \ X"$ac_dest" : 'X\(//\)$' \| \ X"$ac_dest" : 'X\(/\)' \| \ . : '\(.\)' 2>/dev/null || echo X"$ac_dest" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } /^X\(\/\/\)[^/].*/{ s//\1/; q; } /^X\(\/\/\)$/{ s//\1/; q; } /^X\(\/\).*/{ s//\1/; q; } s/.*/./; q'` { if $as_mkdir_p; then mkdir -p "$ac_dir" else as_dir="$ac_dir" as_dirs= while test ! -d "$as_dir"; do as_dirs="$as_dir $as_dirs" as_dir=`(dirname "$as_dir") 2>/dev/null || $as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$as_dir" : 'X\(//\)[^/]' \| \ X"$as_dir" : 'X\(//\)$' \| \ X"$as_dir" : 'X\(/\)' \| \ . : '\(.\)' 2>/dev/null || echo X"$as_dir" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } /^X\(\/\/\)[^/].*/{ s//\1/; q; } /^X\(\/\/\)$/{ s//\1/; q; } /^X\(\/\).*/{ s//\1/; q; } s/.*/./; q'` done test ! -n "$as_dirs" || mkdir $as_dirs fi || { { echo "$as_me:$LINENO: error: cannot create directory \"$ac_dir\"" >&5 echo "$as_me: error: cannot create directory \"$ac_dir\"" >&2;} { (exit 1); exit 1; }; }; } ac_builddir=. if test "$ac_dir" != .; then ac_dir_suffix=/`echo "$ac_dir" | sed 's,^\.[\\/],,'` # A "../" for each directory in $ac_dir_suffix. ac_top_builddir=`echo "$ac_dir_suffix" | sed 's,/[^\\/]*,../,g'` else ac_dir_suffix= ac_top_builddir= fi case $srcdir in .) # No --srcdir option. We are building in place. ac_srcdir=. if test -z "$ac_top_builddir"; then ac_top_srcdir=. else ac_top_srcdir=`echo $ac_top_builddir | sed 's,/$,,'` fi ;; [\\/]* | ?:[\\/]* ) # Absolute path. ac_srcdir=$srcdir$ac_dir_suffix; ac_top_srcdir=$srcdir ;; *) # Relative path. ac_srcdir=$ac_top_builddir$srcdir$ac_dir_suffix ac_top_srcdir=$ac_top_builddir$srcdir ;; esac # Do not use `cd foo && pwd` to compute absolute paths, because # the directories may not exist. case `pwd` in .) ac_abs_builddir="$ac_dir";; *) case "$ac_dir" in .) ac_abs_builddir=`pwd`;; [\\/]* | ?:[\\/]* ) ac_abs_builddir="$ac_dir";; *) ac_abs_builddir=`pwd`/"$ac_dir";; esac;; esac case $ac_abs_builddir in .) ac_abs_top_builddir=${ac_top_builddir}.;; *) case ${ac_top_builddir}. in .) ac_abs_top_builddir=$ac_abs_builddir;; [\\/]* | ?:[\\/]* ) ac_abs_top_builddir=${ac_top_builddir}.;; *) ac_abs_top_builddir=$ac_abs_builddir/${ac_top_builddir}.;; esac;; esac case $ac_abs_builddir in .) ac_abs_srcdir=$ac_srcdir;; *) case $ac_srcdir in .) ac_abs_srcdir=$ac_abs_builddir;; [\\/]* | ?:[\\/]* ) ac_abs_srcdir=$ac_srcdir;; *) ac_abs_srcdir=$ac_abs_builddir/$ac_srcdir;; esac;; esac case $ac_abs_builddir in .) ac_abs_top_srcdir=$ac_top_srcdir;; *) case $ac_top_srcdir in .) ac_abs_top_srcdir=$ac_abs_builddir;; [\\/]* | ?:[\\/]* ) ac_abs_top_srcdir=$ac_top_srcdir;; *) ac_abs_top_srcdir=$ac_abs_builddir/$ac_top_srcdir;; esac;; esac { echo "$as_me:$LINENO: executing $ac_dest commands" >&5 echo "$as_me: executing $ac_dest commands" >&6;} case $ac_dest in depfiles ) test x"$AMDEP_TRUE" != x"" || for mf in $CONFIG_FILES; do # Strip MF so we end up with the name of the file. mf=`echo "$mf" | sed -e 's/:.*$//'` # Check whether this is an Automake generated Makefile or not. # We used to match only the files named `Makefile.in', but # some people rename them; so instead we look at the file content. 
# Grep'ing the first line is not enough: some people post-process # each Makefile.in and add a new line on top of each file to say so. # So let's grep whole file. if grep '^#.*generated by automake' $mf > /dev/null 2>&1; then dirpart=`(dirname "$mf") 2>/dev/null || $as_expr X"$mf" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$mf" : 'X\(//\)[^/]' \| \ X"$mf" : 'X\(//\)$' \| \ X"$mf" : 'X\(/\)' \| \ . : '\(.\)' 2>/dev/null || echo X"$mf" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } /^X\(\/\/\)[^/].*/{ s//\1/; q; } /^X\(\/\/\)$/{ s//\1/; q; } /^X\(\/\).*/{ s//\1/; q; } s/.*/./; q'` else continue fi # Extract the definition of DEPDIR, am__include, and am__quote # from the Makefile without running `make'. DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"` test -z "$DEPDIR" && continue am__include=`sed -n 's/^am__include = //p' < "$mf"` test -z "am__include" && continue am__quote=`sed -n 's/^am__quote = //p' < "$mf"` # When using ansi2knr, U may be empty or an underscore; expand it U=`sed -n 's/^U = //p' < "$mf"` # Find all dependency output files, they are included files with # $(DEPDIR) in their names. We invoke sed twice because it is the # simplest approach to changing $(DEPDIR) to its actual value in the # expansion. for file in `sed -n " s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \ sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g' -e 's/\$U/'"$U"'/g'`; do # Make sure the directory exists. test -f "$dirpart/$file" && continue fdir=`(dirname "$file") 2>/dev/null || $as_expr X"$file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$file" : 'X\(//\)[^/]' \| \ X"$file" : 'X\(//\)$' \| \ X"$file" : 'X\(/\)' \| \ . : '\(.\)' 2>/dev/null || echo X"$file" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } /^X\(\/\/\)[^/].*/{ s//\1/; q; } /^X\(\/\/\)$/{ s//\1/; q; } /^X\(\/\).*/{ s//\1/; q; } s/.*/./; q'` { if $as_mkdir_p; then mkdir -p $dirpart/$fdir else as_dir=$dirpart/$fdir as_dirs= while test ! -d "$as_dir"; do as_dirs="$as_dir $as_dirs" as_dir=`(dirname "$as_dir") 2>/dev/null || $as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$as_dir" : 'X\(//\)[^/]' \| \ X"$as_dir" : 'X\(//\)$' \| \ X"$as_dir" : 'X\(/\)' \| \ . : '\(.\)' 2>/dev/null || echo X"$as_dir" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } /^X\(\/\/\)[^/].*/{ s//\1/; q; } /^X\(\/\/\)$/{ s//\1/; q; } /^X\(\/\).*/{ s//\1/; q; } s/.*/./; q'` done test ! -n "$as_dirs" || mkdir $as_dirs fi || { { echo "$as_me:$LINENO: error: cannot create directory $dirpart/$fdir" >&5 echo "$as_me: error: cannot create directory $dirpart/$fdir" >&2;} { (exit 1); exit 1; }; }; } # echo "creating $dirpart/$file" echo '# dummy' > "$dirpart/$file" done done ;; esac done { (exit 0); exit 0; } ruby-mkrf-0.2.3.orig/test/sample_files/syck-0.55/README.BYTECODE0000644000000000000000000004550011672453175022143 0ustar rootroot# # Reflects Oren's comments, adds yamlbyte.h at the bottom # subject: Revision #4 of YAML Bytecodes summary: > This proposal defines a 'preparsed' format where a YAML syntax is converted into a series of events, as bytecodes. Each bytecode appears on its own line, starting with a single character and ending with a line feed character, '\n'. codes: # # Primary Bytecodes (Capital Letters) # # These bytecodes form the minimum needed to represent YAML information # from the serial model (ie, without format and comments) # 'D': name: Document desc: > Indicates that a document has begun, either it is the beginning of a YAML stream, or a --- has been found. 
Thus, an empty document is expressed as "D\n" 'V': name: Directive desc: > This represents any YAML directives immediately following a 'D' bytecode. For example, '--- %YAML:1.0' produces the bytecode "D\nVYAML:1.0\n". 'P': name: Pause Stream desc: > This is the instruction used when a document has terminated, but another document has not yet begun. Thus, it is optional, and typically used to pause parsing. For example, a stream starting with an empty document, but then in a hold state for the next document, would be: "D\nP\n" '\z': name: Finish (end stream) desc: > YAML bytecodes are meant to be passable as a single "C" string, and thus the null terminator can optionally be used to signal the end of a stream. When writing bytecodes out to a flat file, the file need not contain a null terminator; however, when read into memory it should always have a null terminator. 'M': name: Mapping desc: > Indicates the beginning of a mapping; children of the mapping are provided as a series of K1,V1,K2,V2 pairs as they are found in the input stream. For example, the bytecodes for "{ a: b, c: d }" would be "M\nSa\nSb\nSc\nSd\nE\n" 'Q': name: Sequence desc: > Indicates the beginning of a sequence; children are provided following it until an 'E' bytecode is encountered. So, the bytecodes for "[ one, two ]" would be "Q\nSone\nStwo\nE\n" 'E': name: End Collection desc: > This closes the outermost collection (Mapping, Sequence). Note that a document has one and only one node following it, and therefore is not a branch that needs closing. 'S': name: Scalar desc: > This indicates the start of a scalar value, which can be continued by the 'N' and 'C' bytecodes. This bytecode is used for sequence entries, keys, values, etc. 'C': name: Scalar Continuation desc: > Since a scalar may not fit within a buffer, and since it may not contain a \n character, it may have to be broken into several chunks. 'N': name: Normalized New Line (in a scalar value) desc: > Scalar values must be chunked so that new lines and null values do not occur within an 'S' or 'C' bytecode (in the bytecodes, all other C0 characters need not be escaped). This bytecode is then used to represent one or more newlines, with the number of newlines optionally following. For example, "Hello\nWorld" would be "SHello\nN\nCWorld\n", and "Hello\n\n\nWorld" is "SHello\nN3\nCWorld\n". If the new line is an LS or a PS, the N bytecode can be followed with an L or P. Thus, "Hello\PWorld\L" is reported as "SHello\nNP\nCWorld\nNL\n" 'Z': name: Null Character (in a scalar value) desc: > As with normalized new lines above, since the null character cannot be used in the bytecodes, it must be escaped, ie, "Hello\zWorld" would be "SHello\nZ\nCWorld\n". 'A': name: Alias desc: > This is used whenever there is an alias node; for example, "[ &X one, *X ]" would be normalized to "Q\nRX\nSone\nAX\nE\n" -- in this example, the anchor bytecode applies to the very next content bytecode. 'R': name: Reference (Anchor) desc: > This bytecode associates an anchor with the very next content node, see the 'A' alias bytecode. 'T': name: Transfer desc: > This is the transfer method. If the value begins with a '!', then it is not normalized. Otherwise, the value is a fully qualified tag URI. The transfer method applies only to the node immediately following, and thus it can be seen as a modifier like the anchor. For example, "Ttag:yaml.org,2002:str\nSstring\n" is normalized, "T!str\nSstring\n" is not. 
# # Formatting bytecodes (lower case) # # The following bytecodes are purely at the syntax level and # useful for pretty printers and emitters. Since the range of # lower case letters is contiguous, it could be easy for a # processor to simply ignore all bytecodes in this range. # 'c': name: Comment desc: > This is a single line comment. It is terminated like all of the other variable length items, with a '\n'. 'i': name: Indent desc: > Specifies the number of additional spaces to indent for subsequent block style nodes; "i4\n" specifies a 4-character indent. 's': name: Scalar styling desc: > This bytecode is followed with one of the following items to indicate the style to be used for the very next content node. It is an error to specify a style other than double quoted for a scalar that must be escaped. Furthermore, there must be agreement between the style and the very next content node; in other words, a scalar style requires that the next content node be an S. > flow scalar " double quoted scalar ' single quoted scalar | literal scalar p plain scalar { inline mapping [ inline sequence b block style (for mappings and sequences) # # Advanced bytecodes (not alphabetic) # # These are optional goodies which one could find useful. # '#': name: Line Number desc: > This bytecode allows the line number of the very next node to be reported. '!': name: Notice desc: > This is a message sent from the producer to the consumer regarding the state of the stream or document. It does not necessarily end a stream, as the 'finish' bytecode can be used for this purpose. This signal has a packed format, with the error number, a comma, and a textual message: "#22\n!73,Indentation mismatch\n" "#132\n!84,Tabs are illegal for indentation\n" ',': name: Span desc: > This bytecode gives the span of the very next 'S', 'M', or 'Q' bytecode -- including its subordinates. For scalars, it includes the span of all subordinate 'N' and 'C' codes. For mappings or sequences, this gives the length all the way to the corresponding 'E' bytecode so that the entire branch can be skipped. The length is given starting at the corresponding 'S', 'M' or 'Q' bytecode and extends to the first character following subordinate nodes. Since this length instruction is meant to be used to 'speed' things up, and since calculating the length by hand is not really ideal, the length is expressed in hex. This allows programs to easily convert the length to an actual value (converting from hex to integers is easier than from decimal). Furthermore, all leading x's are ignored (so that they can be filled in later), and if the bytecode value is all x's, then the length is unknown. Lastly, this length is expressed in 8 bit units for UTF-8, and 16 bit units for UTF-16. For example, --- [[one, two], three] is expressed as ",25\nD\n,x1E\nQ\n,xxE\nQ\nSone\nStwo\nE\nSthree\nE\n" Thus it is seen that the address of D plus 37 is the null terminator for the string, the first 'Q' plus 30 also gives the null terminator, and the second 'Q' plus 14 jumps to the opening 'S' for the third scalar. '@': name: Allocate desc: > This is a hint telling the processor how many items are in the following collection (mapping pairs, or sequence values), or how many character units need to be allocated to hold the next value. Clearly this is an encoding-specific value. The length which follows is in hex (not decimal). For example, "one" could be "@x3\nSone". 
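Because each bytecode is a single leading character on a '\n'-terminated line, a consumer can be little more than a loop and a switch over that character. The following is a minimal C sketch (not part of this proposal) that walks a null-terminated bytecode string and reports a few of the primary codes; the function name dump_bytecodes() is made up.

      #include <stdio.h>
      #include <string.h>

      /* Walk a null-terminated bytecode string one '\n'-terminated line
       * at a time, dispatching on the single leading character. */
      void
      dump_bytecodes( const char *stream )
      {
          const char *line = stream;
          while ( *line )
          {
              const char *nl  = strchr( line, '\n' );
              size_t len      = nl ? (size_t)( nl - line ) : strlen( line );
              char code       = line[0];
              const char *arg = line + 1;              /* rest of the line */
              size_t arglen   = len ? len - 1 : 0;

              switch ( code )
              {
                  case 'D': printf( "document\n" );                       break;
                  case 'M': printf( "mapping begins\n" );                 break;
                  case 'Q': printf( "sequence begins\n" );                break;
                  case 'E': printf( "end collection\n" );                 break;
                  case 'S': printf( "scalar: %.*s\n", (int)arglen, arg ); break;
                  default:  printf( "other bytecode '%c'\n", code );      break;
              }

              if ( !nl ) break;
              line = nl + 1;
          }
      }

      /* Example: the document "{ a: b }" as bytecodes.            */
      /*     dump_bytecodes( "D\nM\nSa\nSb\nE\n" );                 */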
design: - name: streaming support problem: > The interface should ideally allow for a YAML document to be moved incrementally as a stream through a process. In particular, YAML is inherently line-oriented, thus the interface should probably reflect this fundamental character. solution: > The bytecodes deliver scalars as chunks, each chunk limited to at most one line. While this is not ideal for passing large binary objects, it is simple and easy to understand. - name: push problem: > The most common 'parsers' out there for YAML are push style, where the producer owns the 'C' program stack, and the consumer keeps its state as a heap object. Ideal use of a push interface is an emitter, since this allows the sender (the application program) to use the program stack and thus keep its state on the call stack in local, automatic variables. solution: > A push interface can simply call a single event handler with a (bytecode, payload) tuple. Since the core complexity is in the bytecodes, the actual function signature is straightforward, allowing for relative language independence. Since the bytecode is always one character, the event handler could just receive a string where the tuple is implicit. - name: pull problem: > The other alternative for a streaming interface is a 'pull' mechanism, or iterator model, where the consumer owns the C stack and the producer keeps any state needed as a heap object. Ideal use of a pull interface is a parser, since this allows the receiver (the application program) to use the program stack, keeping its state on the call stack in local variables. solution: > A pull interface would also be a simple function that, when called, fills a buffer with binary node(s). Or, in a language with garbage collection, it could be implemented as an iterator returning a string containing the bytecode line (the bytecode followed immediately by the bytecode argument as a single string) or as a tuple. - name: pull2push problem: > This is done easily via a small loop which pulls from the iterator and pushes to the event handler. solution: > For python, assuming the parser is implemented as an iterator where one can 'pull' (bytecode, args) tuples, and assuming the emitter has an event callback taking a (bytecode, args) tuple, we have: def pull2push(parser, emitter): for (bytecode, args) in parser: emitter.push(bytecode, args) - name: push2pull problem: > This requires the entire YAML stream be cached in memory, or each of the two stages in a thread or different continuation with shared memory or a pipe between them. solution: > This use case seems much easier with a binary stream; that is, one need not convert the style of functions between the push vs pull pattern. And, for languages supporting continuations (ruby), perhaps push vs pull is not even an issue... for a language like python, one would use the threaded Queue object: one thread pushes (bytecode, args) tuples into the Queue, while the other thread pulls the tuples out. Simple. - name: neutrality problem: > It would be ideal if the C program interface was simple enough to be independent of programming language. In an ideal case, imagine a flow of YAML structured data through various processing stages on a server, where each processing stage is written in a different programming language. 
solution: > While it may be hard for each language to write a syntax parser filled with all of the little details, it would be much much easier to write a parser for these bytecodes; as it involves simple string handling, dispatching on the first character in each string. - name: tools problem: > A goal of mine is to have a YPATH expression language, a schema language, and a transformation language. I would like these items to be reusable by a great number of platforms/languages, and in particular as its own callable processing stage. solution: > If such an expression language was written on top of a bytecode format like this, via a simple pull function (/w adapters for push2pull and pull2push) quite a bit of reusability could emerge. Imagine a schema validator which is injected into the bytecode stream and it is an identity operation unless an exception occurs, in which case, it terminates the document and makes the next document be a description of the validation error. - name: encoding problem: > Text within the bytecode format must be given an encoding. There are several considerations at hand listed below. solution: > The YAML bytecode format uses the same encodings as YAML itself, and thus is independent of actual encoding. A parser library should have several functions to convert between the encodings. examples: - yaml: | --- - plain - > this is a flow scalar - > another flow scalar which is continued on a second line and indented 2 spaces - &001 !str | This is a block scalar, both typed and anchored - *001 # this was an alias - "This is a \"double quoted\" scalar" bytecode: | D Q Splain f Sthis is a flow scalar Sanother flow scalar which is continued Con a second line and indented 2 spaces b a001 t!str SThis is a block scalar, both typed N Cand anchored R001 cthis was an alias d SThis is a "double quoted" scalar E cheader: | /* yamlbyte.h * * The YAML bytecode "C" interface header file. See the YAML bytecode * reference for bytecode sequence rules and for the meaning of each * bytecode. 
*/ #ifndef YAMLBYTE_H #define YAMLBYTE_H #include /* list out the various YAML bytecodes */ typedef enum { /* content bytecodes */ YAML_FINISH = 0, YAML_DOCUMENT = 'D', YAML_DIRECTIVE = 'V', YAML_PAUSE = 'P', YAML_MAPPING = 'M', YAML_SEQUENCE = 'S', YAML_ENDMAPSEQ = 'E', YAML_SCALAR = 'S', YAML_CONTINUE = 'C', YAML_NEWLINE = 'N', YAML_NULLCHAR = 'Z', YAML_ALIAS = 'A', YAML_ANCHOR = 'R', YAML_TRANSFER = 'T', /* formatting bytecodes */ YAML_COMMENT = 'c', YAML_INDENT = 'i', YAML_STYLE = 's', /* other bytecodes */ YAML_LINENUMBER = '#', YAML_NOTICE = '!', YAML_SPAN = ',', YAML_ALLOC = '@' } yaml_code_t; /* additional modifiers for the YAML_STYLE bytecode */ typedef enum { YAML_FLOW = '>', YAML_LITERAL = '|', YAML_BLOCK = 'b', YAML_PLAIN = 'p', YAML_INLINE_MAPPING = '{', YAML_INLINE_SEQUENCE = '}', YAML_SINGLE_QUOTED = 39, YAML_DOUBLE_QUOTED = '"' } yaml_style_t; typedef unsigned char yaml_utf8_t; typedef unsigned short yaml_utf16_t; #ifdef YAML_UTF8 #ifdef YAML_UTF16 #error Must only define YAML_UTF8 or YAML_UTF16 #endif typedef yaml_utf8_t yaml_char_t; #else #ifdef YAML_UTF16 typedef yaml_utf16_t yaml_char_t; #else #error Must define YAML_UTF8 or YAML_UTF16 #endif #endif /* return value for push function, tell parser if you want to stop */ typedef enum { YAML_MORE = 1, /* producer should continue to fire events */ YAML_STOP = 0 /* producer should stop firing events */ } yaml_more_t; /* push bytecodes from a producer to a consumer * where arg is null terminated /w a length */ typedef void * yaml_consumer_t; typedef yaml_more_t (*yaml_push_t)( yaml_consumer_t self, yaml_code_t code, const yaml_char_t *arg, size_t arglen ); /* pull bytecodes by the producer from the consumer, where * producer must null terminate buff and return the number * of sizeof(yaml_char_t) bytes used */ typedef void * yaml_producer_t; typedef size_t (*yaml_pull_t)( yaml_producer_t self, yaml_code_t *code, yaml_char_t *buff, /* at least 1K buffer */ size_t buffsize ); /* returns number of bytes used in the buffer */ /* canonical helper to show how to hook up a parser (as a push * producer) to an emitter (as a push consumer) */ #define YAML_PULL2PUSH(pull, producer, push, consumer) \ do { \ yaml_code_t code = YAML_NOTICE; \ yaml_more_t more = YAML_CONTINUE; \ yaml_char_t buff[1024]; \ size_t size = 0; \ memset(buff, 0, 1024 * sizeof(yaml_char_t)); \ while( code && more) { \ size = (pull)((producer),&code, buff, 1024); \ assert(size < 1024 && !buff[size]); \ more = (push)((consumer),code, buff, size); \ } \ buff[0] = 0; \ (push)((consumer),YAML_FINISH, buff, 0); \ } while(1) #endif ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/0000755000000000000000000000000011672453175021436 5ustar rootrootruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/README0000644000000000000000000000670111672453175022322 0ustar rootroot== INSTALLATION Installation is simple. Follow the following steps: === Rubygems gem install libxml-ruby === Tarball/zip $ rake test $ rake install If extconf yacks up an error, follow the instructions it provides. You will need to chdir to ext/xml and run 'ruby extconf.rb' to provide options, after which you can either use Rake for everything or do with make (make && make install). Once installed, look at the test scripts (tests/*.rb), and run 'rake doc' to generate API documentation. You can find the latest documentation at: * http://libxml.rubyforge.org/doc == DEPENDENCIES libxml requires a few other libraries to be installed inorder to function properly. 
* libm (math routines: very standard) * libz (zlib) * libiconv * libxml2 == USAGE Basic usage for reading and writing documents. === WRITING Writing a simple document: # require 'rubygems' # if installed via Gems require 'xml/libxml' doc = XML::Document.new() doc.root = XML::Node.new('root_node') root = doc.root root << elem1 = XML::Node.new('elem1') elem1['attr1'] = 'val1' elem1['attr2'] = 'val2' root << elem2 = XML::Node.new('elem2') elem2['attr1'] = 'val1' elem2['attr2'] = 'val2' root << elem3 = XML::Node.new('elem3') elem3 << elem4 = XML::Node.new('elem4') elem3 << elem5 = XML::Node.new('elem5') elem5 << elem6 = XML::Node.new('elem6') elem6 << 'Content for element 6' elem3['attr'] = 'baz' # Namespace hack to reduce the numer of times XML:: is typed include XML root << elem7 = Node.new('foo') 1.upto(10) do |i| elem7 << n = Node.new('bar') n << i end format = true doc.save('output.xml', format) The file output.xml contains: Content for element 6 1 2 3 4 5 6 7 8 9 10 === READING Reading XML is slightly more complex and there are many more ways to perform this operation. This reads in and processes the above generated XML document, output.xml. This script assumes that the structure of the document is already known. # require 'rubygems' # if installed via Gems require 'xml/libxml' doc = XML::Document.file('output.xml') root = doc.root puts "Root element name: #{root.name}" elem3 = root.find('elem3').to_a.first puts "Elem3: #{elem3['attr']}" doc.find('//root_node/foo/bar').each do |node| puts "Node path: #{node.path} \t Contents: #{node}" end And your terminal should look like: Root element name: root_node Elem3: baz Node path: /root_node/foo/bar[1] Contents: 1 Node path: /root_node/foo/bar[2] Contents: 2 Node path: /root_node/foo/bar[3] Contents: 3 Node path: /root_node/foo/bar[4] Contents: 4 Node path: /root_node/foo/bar[5] Contents: 5 Node path: /root_node/foo/bar[6] Contents: 6 Node path: /root_node/foo/bar[7] Contents: 7 Node path: /root_node/foo/bar[8] Contents: 8 Node path: /root_node/foo/bar[9] Contents: 9 Node path: /root_node/foo/bar[10] Contents: 10 == MORE INFORMATION If you have any questions, please send email to libxml-devel@rubyforge.org. 
# $Id: README,v 1.5 2006/04/24 19:29:49 roscopeco Exp $ ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/0000755000000000000000000000000011672453175022236 5ustar rootrootruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/0000755000000000000000000000000011672453175023036 5ustar rootrootruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_document.h0000644000000000000000000000200111672453175026737 0ustar rootroot/* $Id: ruby_xml_document.h,v 1.1 2006/02/21 20:40:16 roscopeco Exp $ */ /* Please see the LICENSE file for copyright and distribution information */ #ifndef __RUBY_XML_DOCUMENT__ #define __RUBY_XML_DOCUMENT__ extern VALUE cXMLDocument; typedef struct rxp_document { xmlDocPtr doc; /* Tree/DOM interface */ int data_type; /* The data type referenced by *data */ void *data; /* Pointer to an external structure of options */ int is_ptr; /* Determines if this object owns its data or points to it someplace else */ VALUE xmlver; /* T_STRING with the xml version */ } ruby_xml_document; VALUE ruby_xml_document_filename_get(VALUE self); void ruby_xml_document_free(ruby_xml_document *rxd); VALUE ruby_xml_document_new(VALUE class, xmlDocPtr doc); VALUE ruby_xml_document_new2(VALUE class, VALUE xmlver); VALUE ruby_xml_document_new3(VALUE class); VALUE ruby_xml_document_new4(VALUE class, xmlDocPtr doc); VALUE ruby_xml_document_root_get(VALUE self); void ruby_init_xml_document(void); #endif ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_xpointer_context.c0000644000000000000000000000126411672453175030542 0ustar rootroot/* $Id: ruby_xml_xpointer_context.c,v 1.2 2006/02/27 12:55:32 roscopeco Exp $ */ /* Please see the LICENSE file for copyright and distribution information */ #include "libxml.h" #include "ruby_xml_xpointer_context.h" VALUE cXMLXPointerContext; VALUE eXMLXPointerContextInvalidPath; // Rdoc needs to know #ifdef RDOC_NEVER_DEFINED mXML = rb_define_module("XML"); cXMLXPointer = rb_define_class_under(mXML, "XPointer", rb_cObject); #endif void ruby_init_xml_xpointer_context(void) { cXMLXPointerContext = rb_define_class_under(cXMLXPointer, "Context", cXMLXPathContext); eXMLXPointerContextInvalidPath = rb_define_class_under(cXMLXPointerContext, "InvalidPath", rb_eException); } ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/cbg.c0000644000000000000000000000430111672453175023733 0ustar rootroot#include #include #include "ruby.h" /* int xmlRegisterInputCallbacks (xmlInputMatchCallback matchFunc, xmlInputOpenCallback openFunc, xmlInputReadCallback readFunc, xmlInputCloseCallback closeFunc); int (*xmlInputMatchCallback) (char const *filename); void* (*xmlInputOpenCallback) (char const *filename); int (*xmlInputReadCallback) (void *context, char *buffer, int len); int (*xmlInputCloseCallback) (void *context); */ typedef struct deb_doc_context { char *buffer; char *bpos; int remaining; } deb_doc_context; int deb_Match (char const *filename) { fprintf( stderr, "deb_Match: %s\n", filename ); if (!xmlStrncasecmp(BAD_CAST filename, BAD_CAST "deb://", 6)) { return(1); } return(0); } void* deb_Open (char const *filename) { deb_doc_context *deb_doc; VALUE res; deb_doc = (deb_doc_context*)malloc( sizeof(deb_doc_context) ); res = rb_funcall( rb_funcall( rb_mKernel, rb_intern("const_get"), 1, rb_str_new2("DEBSystem") ), rb_intern("document_query"), 1, rb_str_new2(filename)); deb_doc->buffer = strdup( StringValuePtr(res) ); //deb_doc->buffer = strdup("serepes"); deb_doc->bpos = deb_doc->buffer; deb_doc->remaining = 
strlen(deb_doc->buffer); return deb_doc; } int deb_Read (void *context, char *buffer, int len) { deb_doc_context *deb_doc; int ret_len; deb_doc = (deb_doc_context*)context; if (len >= deb_doc->remaining) { ret_len = deb_doc->remaining; } else { ret_len = len; } deb_doc->remaining -= ret_len; strncpy( buffer, deb_doc->bpos, ret_len ); deb_doc->bpos += ret_len; return ret_len; } int deb_Close (void *context) { free( ((deb_doc_context*)context)->buffer ); free( context ); return 1; } void deb_register_cbg() { xmlRegisterInputCallbacks( deb_Match, deb_Open, deb_Read, deb_Close ); } ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_xpath_context.h0000644000000000000000000000145711672453175030027 0ustar rootroot/* $Id: ruby_xml_xpath_context.h,v 1.2 2006/02/27 12:55:32 roscopeco Exp $ */ /* Please see the LICENSE file for copyright and distribution information */ #ifndef __RUBY_XML_XPATH_CONTEXT__ #define __RUBY_XML_XPATH_CONTEXT__ extern VALUE cXMLXPathContext; typedef struct ruby_xml_xpath_context { VALUE xd; xmlXPathContextPtr ctxt; } ruby_xml_xpath_context; void ruby_xml_xpath_context_free(ruby_xml_xpath_context *rxxpc); VALUE ruby_xml_xpath_context_new(VALUE class, VALUE xd, xmlXPathContextPtr ctxt); VALUE ruby_xml_xpath_context_new2(VALUE xd, xmlXPathContextPtr ctxt); VALUE ruby_xml_xpath_context_new3(VALUE xd); VALUE ruby_xml_xpath_context_new4(VALUE rnode); VALUE ruby_xml_xpath_context_register_namespace(VALUE self, VALUE prefix, VALUE uri); void ruby_init_xml_xpath_context(void); #endif ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_ns.c0000644000000000000000000000613311672453175025546 0ustar rootroot/* $Id: ruby_xml_ns.c,v 1.1 2006/02/21 20:40:16 roscopeco Exp $ */ /* Please see the LICENSE file for copyright and distribution information */ #include "libxml.h" #include "ruby_xml_ns.h" VALUE cXMLNS; /* * call-seq: * ns.href => "href" * * Obtain the namespace's href. */ VALUE ruby_xml_ns_href_get(VALUE self) { ruby_xml_ns *rxns; Data_Get_Struct(self, ruby_xml_ns, rxns); if (rxns->ns == NULL || rxns->ns->href == NULL) return(Qnil); else return(rb_str_new2((const char*)rxns->ns->href)); } /* * call-seq: * ns.href? => (true|false) * * Determine whether this namespace has an href. */ VALUE ruby_xml_ns_href_q(VALUE self) { ruby_xml_ns *rxns; Data_Get_Struct(self, ruby_xml_ns, rxns); if (rxns->ns == NULL || rxns->ns->href == NULL) return(Qfalse); else return(Qtrue); } void ruby_xml_ns_free(ruby_xml_ns *rxns) { if (rxns->ns != NULL && !rxns->is_ptr) { xmlFreeNs(rxns->ns); rxns->ns = NULL; } free(rxns); } static void ruby_xml_ns_mark(ruby_xml_ns *rxns) { if (rxns == NULL) return; if (!NIL_P(rxns->xd)) rb_gc_mark(rxns->xd); } VALUE ruby_xml_ns_new(VALUE class, VALUE xd, xmlNsPtr ns) { ruby_xml_ns *rxns; rxns = ALLOC(ruby_xml_ns); rxns->is_ptr = 0; rxns->ns = ns; rxns->xd = xd; return(Data_Wrap_Struct(class, ruby_xml_ns_mark, ruby_xml_ns_free, rxns)); } VALUE ruby_xml_ns_new2(VALUE class, VALUE xd, xmlNsPtr ns) { ruby_xml_ns *rxns; rxns = ALLOC(ruby_xml_ns); rxns->is_ptr = 1; rxns->ns = ns; rxns->xd = xd; return(Data_Wrap_Struct(class, ruby_xml_ns_mark, ruby_xml_ns_free, rxns)); } /* * call-seq: * ns.next => ns * * Obtain the next namespace. 
*/ VALUE ruby_xml_ns_next(VALUE self) { ruby_xml_ns *rxns; Data_Get_Struct(self, ruby_xml_ns, rxns); if (rxns->ns == NULL || rxns->ns->next == NULL) return(Qnil); else return(ruby_xml_ns_new2(cXMLNS, rxns->xd, rxns->ns->next)); } /* * call-seq: * ns.prefix => "prefix" * ns.to_s => "prefix" * * Obtain the namespace's prefix. */ VALUE ruby_xml_ns_prefix_get(VALUE self) { ruby_xml_ns *rxns; Data_Get_Struct(self, ruby_xml_ns, rxns); if (rxns->ns == NULL || rxns->ns->prefix == NULL) return(Qnil); else return(rb_str_new2((const char*)rxns->ns->prefix)); } /* * call-seq: * ns.prefix? => (true|false) * * Determine whether this namespace has a prefix. */ VALUE ruby_xml_ns_prefix_q(VALUE self) { ruby_xml_ns *rxns; Data_Get_Struct(self, ruby_xml_ns, rxns); if (rxns->ns == NULL || rxns->ns->prefix == NULL) return(Qfalse); else return(Qtrue); } // Rdoc needs to know #ifdef RDOC_NEVER_DEFINED mXML = rb_define_module("XML"); #endif void ruby_init_xml_ns(void) { cXMLNS = rb_define_class_under(mXML, "NS", rb_cObject); rb_define_method(cXMLNS, "href", ruby_xml_ns_href_get, 0); rb_define_method(cXMLNS, "href?", ruby_xml_ns_href_q, 0); rb_define_method(cXMLNS, "next", ruby_xml_ns_next, 0); rb_define_method(cXMLNS, "prefix", ruby_xml_ns_prefix_get, 0); rb_define_method(cXMLNS, "prefix?", ruby_xml_ns_prefix_q, 0); rb_define_method(cXMLNS, "to_s", ruby_xml_ns_prefix_get, 0); } ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_tree.c0000644000000000000000000000372611672453175026072 0ustar rootroot/* $Id: ruby_xml_tree.c,v 1.1 2006/02/21 20:40:16 roscopeco Exp $ */ /* Please see the LICENSE file for copyright and distribution information */ #include "libxml.h" #include "ruby_xml_tree.h" VALUE cXMLTree; // Rdoc needs to know #ifdef RDOC_NEVER_DEFINED mXML = rb_define_module("XML"); #endif void ruby_init_xml_tree(void) { cXMLTree = rb_define_class_under(mXML, "Tree", rb_cObject); rb_define_const(cXMLTree, "ELEMENT_NODE", INT2FIX(XML_ELEMENT_NODE)); rb_define_const(cXMLTree, "ATTRIBUTE_NODE", INT2FIX(XML_ATTRIBUTE_NODE)); rb_define_const(cXMLTree, "TEXT_NODE", INT2FIX(XML_TEXT_NODE)); rb_define_const(cXMLTree, "CDATA_SECTION_NODE", INT2FIX(XML_CDATA_SECTION_NODE)); rb_define_const(cXMLTree, "ENTITY_REF_NODE", INT2FIX(XML_ENTITY_REF_NODE)); rb_define_const(cXMLTree, "ENTITY_NODE", INT2FIX(XML_ENTITY_NODE)); rb_define_const(cXMLTree, "PI_NODE", INT2FIX(XML_PI_NODE)); rb_define_const(cXMLTree, "COMMENT_NODE", INT2FIX(XML_COMMENT_NODE)); rb_define_const(cXMLTree, "DOCUMENT_NODE", INT2FIX(XML_DOCUMENT_NODE)); rb_define_const(cXMLTree, "DOCUMENT_TYPE_NODE", INT2FIX(XML_DOCUMENT_TYPE_NODE)); rb_define_const(cXMLTree, "DOCUMENT_FRAG_NODE", INT2FIX(XML_DOCUMENT_FRAG_NODE)); rb_define_const(cXMLTree, "NOTATION_NODE", INT2FIX(XML_NOTATION_NODE)); rb_define_const(cXMLTree, "HTML_DOCUMENT_NODE", INT2FIX(XML_HTML_DOCUMENT_NODE)); rb_define_const(cXMLTree, "DTD_NODE", INT2FIX(XML_DTD_NODE)); rb_define_const(cXMLTree, "ELEMENT_DECL", INT2FIX(XML_ELEMENT_DECL)); rb_define_const(cXMLTree, "ATTRIBUTE_DECL", INT2FIX(XML_ATTRIBUTE_DECL)); rb_define_const(cXMLTree, "ENTITY_DECL", INT2FIX(XML_ENTITY_DECL)); rb_define_const(cXMLTree, "NAMESPACE_DECL", INT2FIX(XML_NAMESPACE_DECL)); rb_define_const(cXMLTree, "XINCLUDE_START", INT2FIX(XML_XINCLUDE_START)); rb_define_const(cXMLTree, "XINCLUDE_END", INT2FIX(XML_XINCLUDE_END)); #ifdef LIBXML_DOCB_ENABLED rb_define_const(cXMLTree, "DOCB_DOCUMENT_NODE", INT2FIX(XML_DOCB_DOCUMENT_NODE)); #endif } 
ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_document.c0000644000000000000000000006457711672453175026764 0ustar rootroot/* $Id: ruby_xml_document.c,v 1.1 2006/02/21 20:40:16 roscopeco Exp $ */ /* Please see the LICENSE file for copyright and distribution information */ #include "libxml.h" #include "ruby_xml_document.h" VALUE cXMLDocument; /* * call-seq: * document.compression => num * * Obtain this document's compression mode identifier. */ VALUE ruby_xml_document_compression_get(VALUE self) { #ifdef HAVE_ZLIB_H ruby_xml_document *rxd; int compmode; Data_Get_Struct(self, ruby_xml_document, rxd); compmode = xmlGetDocCompressMode(rxd->doc); if (compmode == -1) return(Qnil); else return(INT2NUM(compmode)); #else rb_warn("libxml not compiled with zlib support"); return(Qfalse); #endif } /* * call-seq: * document.compression = num * * Set this document's compression mode. */ VALUE ruby_xml_document_compression_set(VALUE self, VALUE num) { #ifdef HAVE_ZLIB_H ruby_xml_document *rxd; int compmode; Check_Type(num, T_FIXNUM); Data_Get_Struct(self, ruby_xml_document, rxd); if (rxd->doc == NULL) { return(Qnil); } else { xmlSetDocCompressMode(rxd->doc, NUM2INT(num)); compmode = xmlGetDocCompressMode(rxd->doc); if (compmode == -1) return(Qnil); else return(INT2NUM(compmode)); } #else rb_warn("libxml compiled without zlib support"); return(Qfalse); #endif } /* * call-seq: * document.compression? => (true|false) * * Determine whether this document is compressed. */ VALUE ruby_xml_document_compression_q(VALUE self) { #ifdef HAVE_ZLIB_H ruby_xml_document *rxd; Data_Get_Struct(self, ruby_xml_document, rxd); if (rxd->doc->compression != -1) return(Qtrue); else return(Qfalse); #else rb_warn("libxml compiled without zlib support"); return(Qfalse); #endif } /* * call-seq: * document.child => node * * Get this document's child node. */ VALUE ruby_xml_document_child_get(VALUE self) { ruby_xml_document *rxd; ruby_xml_node *rxn; VALUE node; Data_Get_Struct(self, ruby_xml_document, rxd); if (rxd->doc->children == NULL) return(Qnil); node = ruby_xml_node_new2(cXMLNode, self, rxd->doc->children); Data_Get_Struct(node, ruby_xml_node, rxn); rxn->xd = self; return(node); } /* * call-seq: * document.child? => (true|false) * * Determine whether this document has a child node. */ VALUE ruby_xml_document_child_q(VALUE self) { ruby_xml_document *rxd; Data_Get_Struct(self, ruby_xml_document, rxd); if (rxd->doc->children == NULL) return(Qfalse); else return(Qtrue); } /* * call-seq: * document.dump([stream]) => true * * Dump this document's XML to the specified IO stream. * If no stream is specified, stdout is used. */ VALUE ruby_xml_document_dump(int argc, VALUE *argv, VALUE self) { OpenFile *fptr; VALUE io; FILE *out; ruby_xml_document *rxd; Data_Get_Struct(self, ruby_xml_document, rxd); if (rxd->doc == NULL) return(Qnil); switch (argc) { case 0: io = rb_stdout; break; case 1: io = argv[0]; if (!rb_obj_is_kind_of(io, rb_cIO)) rb_raise(rb_eTypeError, "need an IO object"); break; default: rb_raise(rb_eArgError, "wrong number of arguments (0 or 1)"); } GetOpenFile(io, fptr); rb_io_check_writable(fptr); out = GetWriteFile(fptr); xmlDocDump(out, rxd->doc); return(Qtrue); } /* * call-seq: * document.debug_dump([stream]) => true * * Debug version of dump. 
*/ VALUE ruby_xml_document_debug_dump(int argc, VALUE *argv, VALUE self) { #ifdef LIBXML_DEBUG_ENABLED OpenFile *fptr; VALUE io; FILE *out; ruby_xml_document *rxd; Data_Get_Struct(self, ruby_xml_document, rxd); if (rxd->doc == NULL) return(Qnil); switch (argc) { case 0: io = rb_stderr; break; case 1: io = argv[0]; if (!rb_obj_is_kind_of(io, rb_cIO)) rb_raise(rb_eTypeError, "need an IO object"); break; default: rb_raise(rb_eArgError, "wrong number of arguments (0 or 1)"); } GetOpenFile(io, fptr); rb_io_check_writable(fptr); out = GetWriteFile(fptr); xmlDebugDumpDocument(out, rxd->doc); return(Qtrue); #else rb_warn("libxml was compiled without debugging support. Please recompile libxml and ruby-libxml"); return(Qfalse); #endif } /* * call-seq: * document.debug_dump_head([stream]) => true * * Debug-dump this document's header to the specified IO stream. * If no stream is specified, stdout is used. */ VALUE ruby_xml_document_debug_dump_head(int argc, VALUE *argv, VALUE self) { #ifdef LIBXML_DEBUG_ENABLED OpenFile *fptr; VALUE io; FILE *out; ruby_xml_document *rxd; Data_Get_Struct(self, ruby_xml_document, rxd); if (rxd->doc == NULL) return(Qnil); switch (argc) { case 0: io = rb_stdout; break; case 1: io = argv[0]; if (!rb_obj_is_kind_of(io, rb_cIO)) rb_raise(rb_eTypeError, "need an IO object"); break; default: rb_raise(rb_eArgError, "wrong number of arguments (0 or 1)"); } GetOpenFile(io, fptr); rb_io_check_writable(fptr); out = GetWriteFile(fptr); xmlDebugDumpDocumentHead(out, rxd->doc); return(Qtrue); #else rb_warn("libxml was compiled without debugging support. Please recompile libxml and ruby-libxml"); return(Qfalse); #endif } /* * call-seq: * document.format_dump([stream], [spacing]) => true * * Dump this document's formatted XML to the specified IO stream. * If no stream is specified, stdout is used. If spacing is * specified, it must be a boolean that determines whether * spacing is used. */ VALUE ruby_xml_document_format_dump(int argc, VALUE *argv, VALUE self) { OpenFile *fptr; VALUE bool, io; FILE *out; ruby_xml_document *rxd; int size, spacing; Data_Get_Struct(self, ruby_xml_document, rxd); if (rxd->doc == NULL) return(Qnil); switch (argc) { case 0: io = rb_stdout; spacing = 1; break; case 1: io = argv[0]; if (!rb_obj_is_kind_of(io, rb_cIO)) rb_raise(rb_eTypeError, "need an IO object"); spacing = 1; break; case 2: io = argv[0]; if (!rb_obj_is_kind_of(io, rb_cIO)) rb_raise(rb_eTypeError, "need an IO object"); bool = argv[1]; if (TYPE(bool) == T_TRUE) spacing = 1; else if (TYPE(bool) == T_FALSE) spacing = 0; else rb_raise(rb_eTypeError, "incorect argument type, second argument must be bool"); break; default: rb_raise(rb_eArgError, "wrong number of arguments (0 or 1)"); } GetOpenFile(io, fptr); rb_io_check_writable(fptr); out = GetWriteFile(fptr); size = xmlDocFormatDump(out, rxd->doc, spacing); return(INT2NUM(size)); } /* * call-seq: * document.debug_format_dump([stream]) => true * * *Deprecated* in favour of format_dump. */ VALUE ruby_xml_document_debug_format_dump(int argc, VALUE *argv, VALUE self) { rb_warn("debug_format_dump has been deprecaited, use format_dump instead"); return(ruby_xml_document_format_dump(argc, argv, self)); } /* * call-seq: * document.encoding => "encoding" * * Obtain the encoding specified by this document. 
*/ VALUE ruby_xml_document_encoding_get(VALUE self) { ruby_xml_document *rxd; Data_Get_Struct(self, ruby_xml_document, rxd); if (rxd->doc->encoding == NULL) return(Qnil); else return(rb_str_new2((const char*)rxd->doc->encoding)); } /* * call-seq: * document.encoding = "encoding" * * Set the encoding for this document. */ VALUE ruby_xml_document_encoding_set(VALUE self, VALUE encoding) { ruby_xml_document *rxd; Check_Type(encoding, T_STRING); Data_Get_Struct(self, ruby_xml_document, rxd); rxd->doc->encoding = (xmlChar*)ruby_strdup(StringValuePtr(encoding)); return(ruby_xml_document_encoding_get(self)); } /* * call-seq: * document.filename => "filename" * * Obtain the filename this document was read from. */ VALUE ruby_xml_document_filename_get(VALUE self) { ruby_xml_document *rxd; rx_file_data *data; Data_Get_Struct(self, ruby_xml_document, rxd); if (rxd->data == NULL) return(Qnil); switch (rxd->data_type) { case RUBY_LIBXML_SRC_TYPE_NULL: return(Qnil); case RUBY_LIBXML_SRC_TYPE_FILE: data = (rx_file_data *)rxd->data; return(data->filename); default: rb_fatal("Unknown document type in libxml"); } return(Qnil); } /* * call-seq: * document.find(xpath_expr, [namespace]) => nodeset * * Find nodes matching the specified xpath expression, optionally * using the specified namespace. Returns an XML::Node::Set. */ VALUE ruby_xml_document_find(int argc, VALUE *argv, VALUE self) { int i, vargc; VALUE *vargv; if (argc > 2 || argc < 1) rb_raise(rb_eArgError, "wrong number of arguments (need 1 or 2)"); vargc = argc + 1; vargv = ALLOC_N(VALUE, vargc + 1); vargv[0] = ruby_xml_document_root_get(self); for (i = 0; idoc != NULL && !rxd->is_ptr) { xmlFreeDoc(rxd->doc); ruby_xml_parser_count--; rxd->doc = NULL; } if (ruby_xml_parser_count == 0) xmlCleanupParser(); switch(rxd->data_type) { case RUBY_LIBXML_SRC_TYPE_NULL: break; case RUBY_LIBXML_SRC_TYPE_FILE: data = (void*)(rx_file_data *)rxd->data; free((rx_file_data *)data); break; case RUBY_LIBXML_SRC_TYPE_STRING: data = (void*)(rx_string_data *)rxd->data; free((rx_string_data *)data); break; case RUBY_LIBXML_SRC_TYPE_IO: data = (void*)(rx_io_data *)rxd->data; free((rx_io_data *)data); break; default: rb_fatal("Unknown data type, %d", rxd->data_type); } free(rxd); } /* * call-seq: * XML::Document.new(xml_version = 1.0) => document * * Create a new XML::Document, optionally specifying the * XML version. */ VALUE ruby_xml_document_initialize(int argc, VALUE *argv, VALUE class) { VALUE docobj, xmlver; switch (argc) { case 0: xmlver = rb_str_new2("1.0"); break; case 1: rb_scan_args(argc, argv, "01", &xmlver); break; default: rb_raise(rb_eArgError, "wrong number of arguments (need 0 or 1)"); } docobj = ruby_xml_document_new2(cXMLDocument, xmlver); return(docobj); } /* * call-seq: * document.last => node * * Obtain the last node. */ VALUE ruby_xml_document_last_get(VALUE self) { ruby_xml_document *rxd; ruby_xml_node *rxn; VALUE node; Data_Get_Struct(self, ruby_xml_document, rxd); if (rxd->doc->last == NULL) return(Qnil); node = ruby_xml_node_new2(cXMLNode, self, rxd->doc->last); Data_Get_Struct(node, ruby_xml_node, rxn); rxn->xd = self; return(node); } /* * call-seq: * document.last? => (true|false) * * Determine whether there is a last node. 
*/ VALUE ruby_xml_document_last_q(VALUE self) { ruby_xml_document *rxd; Data_Get_Struct(self, ruby_xml_document, rxd); if (rxd->doc->last == NULL) return(Qfalse); else return(Qtrue); } static void ruby_xml_document_mark(ruby_xml_document *rxd) { if (rxd == NULL) return; if (!NIL_P(rxd->xmlver)) rb_gc_mark(rxd->xmlver); } VALUE ruby_xml_document_new(VALUE class, xmlDocPtr doc) { ruby_xml_document *rxd; rxd = ALLOC(ruby_xml_document); ruby_xml_parser_count++; rxd->data = NULL; rxd->data_type = RUBY_LIBXML_SRC_TYPE_NULL; rxd->doc = doc; rxd->is_ptr = 0; rxd->xmlver = Qnil; return(Data_Wrap_Struct(cXMLDocument, ruby_xml_document_mark, ruby_xml_document_free, rxd)); } VALUE ruby_xml_document_new2(VALUE class, VALUE xmlver) { ruby_xml_document *rxd; Check_Type(xmlver, T_STRING); rxd = ALLOC(ruby_xml_document); ruby_xml_parser_count++; rxd->data = NULL; rxd->data_type = RUBY_LIBXML_SRC_TYPE_NULL; rxd->doc = xmlNewDoc((xmlChar*)StringValuePtr(xmlver)); rxd->is_ptr = 0; rxd->xmlver = xmlver; if (rxd->doc == NULL) rb_fatal("bad"); return(Data_Wrap_Struct(cXMLDocument, ruby_xml_document_mark, ruby_xml_document_free, rxd)); } VALUE ruby_xml_document_new3(VALUE class) { return(ruby_xml_document_new2(class, rb_str_new2("1.0"))); } VALUE ruby_xml_document_new4(VALUE class, xmlDocPtr doc) { ruby_xml_document *rxd; rxd = ALLOC(ruby_xml_document); rxd->data = NULL; rxd->data_type = RUBY_LIBXML_SRC_TYPE_NULL; rxd->doc = doc; rxd->is_ptr = 1; rxd->xmlver = Qnil; return(Data_Wrap_Struct(cXMLDocument, ruby_xml_document_mark, ruby_xml_document_free, rxd)); } /* * call-seq: * XML::Document.file(filename) => document * * Create a new XML::Document by parsing the specified * file. */ VALUE ruby_xml_document_new_file(VALUE class, VALUE filename) { VALUE parser; parser = ruby_xml_parser_new(cXMLParser); ruby_xml_parser_filename_set(parser, filename); return(ruby_xml_parser_parse(parser)); } /* * call-seq: * document.next => node * * Obtain the next node. */ VALUE ruby_xml_document_next_get(VALUE self) { ruby_xml_document *rxd; ruby_xml_node *rxn; VALUE node; Data_Get_Struct(self, ruby_xml_document, rxd); if (rxd->doc->next == NULL) return(Qnil); node = ruby_xml_node_new2(cXMLNode, self, rxd->doc->next); Data_Get_Struct(node, ruby_xml_node, rxn); rxn->xd = self; return(node); } /* * call-seq: * document.next? => (true|false) * * Determine whether there is a next node. */ VALUE ruby_xml_document_next_q(VALUE self) { ruby_xml_document *rxd; Data_Get_Struct(self, ruby_xml_document, rxd); if (rxd->doc->next == NULL) return(Qfalse); else return(Qtrue); } /* * call-seq: * document.parent => node * * Obtain the parent node. */ VALUE ruby_xml_document_parent_get(VALUE self) { ruby_xml_document *rxd; ruby_xml_node *rxn; VALUE node; Data_Get_Struct(self, ruby_xml_document, rxd); if (rxd->doc->parent == NULL) return(Qnil); node = ruby_xml_node_new2(cXMLNode, self, rxd->doc->parent); Data_Get_Struct(node, ruby_xml_node, rxn); rxn->xd = self; return(node); } /* * call-seq: * document.parent? => (true|false) * * Determine whether there is a parent node. */ VALUE ruby_xml_document_parent_q(VALUE self) { ruby_xml_document *rxd; Data_Get_Struct(self, ruby_xml_document, rxd); if (rxd->doc->parent == NULL) return(Qfalse); else return(Qtrue); } /* * call-seq: * document.prev => node * * Obtain the previous node. 
*/ VALUE ruby_xml_document_prev_get(VALUE self) { ruby_xml_document *rxd; ruby_xml_node *rxn; VALUE node; Data_Get_Struct(self, ruby_xml_document, rxd); if (rxd->doc->prev == NULL) return(Qnil); node = ruby_xml_node_new2(cXMLNode, self, rxd->doc->prev); Data_Get_Struct(node, ruby_xml_node, rxn); rxn->xd = self; return(node); } /* * call-seq: * document.prev? => (true|false) * * Determine whether there is a previous node. */ VALUE ruby_xml_document_prev_q(VALUE self) { ruby_xml_document *rxd; Data_Get_Struct(self, ruby_xml_document, rxd); if (rxd->doc->prev == NULL) return(Qfalse); else return(Qtrue); } /* * call-seq: * document["key"] => "value" * * Obtain the named property. */ VALUE ruby_xml_document_property_get(VALUE self, VALUE key) { return(ruby_xml_node_property_get(ruby_xml_document_root_get(self), key)); } /* * call-seq: * document["key"] = "value" * * Set the named property. */ VALUE ruby_xml_document_property_set(VALUE self, VALUE key, VALUE val) { return(ruby_xml_node_property_set(ruby_xml_document_root_get(self), key, val)); } /* * call-seq: * document.root => node * * Obtain the root node. */ VALUE ruby_xml_document_root_get(VALUE self) { ruby_xml_document *rxd; ruby_xml_node *rxn; VALUE node; xmlNodePtr root; Data_Get_Struct(self, ruby_xml_document, rxd); root = xmlDocGetRootElement(rxd->doc); if (root == NULL) return(Qnil); node = ruby_xml_node_new2(cXMLNode, self, root); Data_Get_Struct(node, ruby_xml_node, rxn); rxn->xd = self; return(node); } /* * call-seq: * document.root = node * * Set the root node. */ VALUE ruby_xml_document_root_set(VALUE self, VALUE node) { ruby_xml_document *rxd; ruby_xml_node *rxn; VALUE retnode; xmlNodePtr root; if (rb_obj_is_kind_of(node, cXMLNode) == Qfalse) rb_raise(rb_eTypeError, "must pass an XML::Node type object"); Data_Get_Struct(self, ruby_xml_document, rxd); Data_Get_Struct(node, ruby_xml_node, rxn); ruby_xml_node_set_ptr(node, 1); root = xmlDocSetRootElement(rxd->doc, rxn->node); if (root == NULL) return(Qnil); retnode = ruby_xml_node_new2(cXMLNode, self, root); return(retnode); } /* * call-seq: * document.save(format = false) * * Save this document to the file given by filename, * optionally formatting the output. */ VALUE ruby_xml_document_save(int argc, VALUE *argv, VALUE self) { ruby_xml_document *rxd; const char *filename; int format, len; format = 0; switch (argc) { case 1: break; case 2: if (TYPE(argv[1]) == T_TRUE) format = 1; else if (TYPE(argv[1]) == T_FALSE) format = 0; else rb_raise(rb_eTypeError, "wrong type of argument, must be bool"); break; default: rb_raise(rb_eArgError, "wrong number of arguments (0 or 1)"); } Check_Type(argv[0], T_STRING); filename = StringValuePtr(argv[0]); Data_Get_Struct(self, ruby_xml_document, rxd); len = xmlSaveFormatFileEnc(filename, rxd->doc, (const char*)rxd->doc->encoding, format); if (len == -1) rb_fatal("Unable to write out file"); else return(INT2NUM(len)); } /* * call-seq: * document.standalone? => (true|false) * * Determine whether this is a standalone document. */ VALUE ruby_xml_document_standalone_q(VALUE self) { ruby_xml_document *rxd; Data_Get_Struct(self, ruby_xml_document, rxd); if (rxd->doc->standalone) return(Qtrue); else return(Qfalse); } /* * call-seq: * document.to_s => "xml" * * Coerce this document to a string representation * of it's XML. 
*/ VALUE ruby_xml_document_to_s(int argc, VALUE *argv, VALUE self) { ruby_xml_document *rxd; xmlChar *result; int format, len; switch (argc) { case 0: format = 1; break; case 1: if (TYPE(argv[0]) == T_TRUE) format = 1; else if (TYPE(argv[0]) == T_FALSE) format = 0; else rb_raise(rb_eTypeError, "wrong type of argument, must be bool"); break; default: rb_raise(rb_eArgError, "wrong number of arguments (0 or 1)"); } Data_Get_Struct(self, ruby_xml_document, rxd); if (rxd->doc == NULL) { return(Qnil); } else if (rxd->doc->encoding != NULL) { if (format) { xmlDocDumpFormatMemoryEnc(rxd->doc, &result, &len, (const char*)rxd->doc->encoding, format); } else { xmlDocDumpMemoryEnc(rxd->doc, &result, &len, (const char*)rxd->doc->encoding); } } else { if (format) xmlDocDumpFormatMemory(rxd->doc, &result, &len, format); else xmlDocDumpMemory(rxd->doc, &result, &len); } return(rb_str_new2((const char*)result)); } /* * call-seq: * document.url => "url" * * Obtain this document's source URL, if any. */ VALUE ruby_xml_document_url_get(VALUE self) { ruby_xml_document *rxd; Data_Get_Struct(self, ruby_xml_document, rxd); if (rxd->doc->URL == NULL) return(Qnil); else return(rb_str_new2((const char*)rxd->doc->URL)); } /* * call-seq: * document.version => "version" * * Obtain the XML version specified by this document. */ VALUE ruby_xml_document_version_get(VALUE self) { ruby_xml_document *rxd; Data_Get_Struct(self, ruby_xml_document, rxd); if (rxd->doc->version == NULL) return(Qnil); else return(rb_str_new2((const char*)rxd->doc->version)); } /* * call-seq: * document.xinclude => num * * Process xinclude directives in this document. */ VALUE ruby_xml_document_xinclude(VALUE self) { #ifdef LIBXML_XINCLUDE_ENABLED ruby_xml_document *rxd; int ret; Data_Get_Struct(self, ruby_xml_document, rxd); ret = xmlXIncludeProcess(rxd->doc); if (ret >= 0) return(INT2NUM(ret)); else rb_raise(eXMLXIncludeError, "error processing xinclude directives in document"); #else rb_warn("libxml was compiled without XInclude support. Please recompile libxml and ruby-libxml"); return(Qfalse); #endif } void LibXML_validity_error(void * ctxt, const char * msg, va_list ap) { if (rb_block_given_p()) { char buff[1024]; snprintf(buff, 1024, msg, ap); rb_yield(rb_ary_new3(2, rb_str_new2(buff), Qtrue)); } else { fprintf(stderr, "error -- found validity error: "); fprintf(stderr, msg, ap); } } void LibXML_validity_warning(void * ctxt, const char * msg, va_list ap) { if (rb_block_given_p()) { char buff[1024]; snprintf(buff, 1024, msg, ap); rb_yield(rb_ary_new3(2, rb_str_new2(buff), Qfalse)); } else { fprintf(stderr, "warning -- found validity error: "); fprintf(stderr, msg, ap); } } /* * call-seq: * document.validate(schema) => (true|false) * * Validate this document against the specified XML::Schema. */ VALUE ruby_xml_document_validate_schema(VALUE self, VALUE schema) { xmlSchemaValidCtxtPtr vptr; ruby_xml_document *c_doc; ruby_xml_schema *c_schema; int is_invalid; Data_Get_Struct(self, ruby_xml_document, c_doc); Data_Get_Struct(schema, ruby_xml_schema, c_schema); vptr = xmlSchemaNewValidCtxt(c_schema->schema); xmlSchemaSetValidErrors(vptr, (xmlSchemaValidityErrorFunc)LibXML_validity_error, (xmlSchemaValidityWarningFunc)LibXML_validity_warning, NULL); is_invalid = xmlSchemaValidateDoc(vptr, c_doc->doc); xmlSchemaFreeValidCtxt(vptr); if (is_invalid) { return Qfalse; } else { return Qtrue; } } /* * call-seq: * document.validate(schema) => (true|false) * * Validate this document against the specified XML::DTD. 
*/ VALUE ruby_xml_document_validate_dtd(VALUE self, VALUE dtd) { xmlValidCtxt cvp; ruby_xml_document *c_doc; ruby_xml_dtd *c_dtd; Data_Get_Struct(self, ruby_xml_document, c_doc); Data_Get_Struct(dtd, ruby_xml_dtd, c_dtd); cvp.userData = NULL; cvp.error = (xmlValidityErrorFunc)LibXML_validity_error; cvp.warning = (xmlValidityWarningFunc)LibXML_validity_warning; cvp.nodeNr = 0; cvp.nodeTab = NULL; cvp.vstateNr = 0; cvp.vstateTab = NULL; if ( xmlValidateDtd(&cvp, c_doc->doc, c_dtd->dtd) ) return(Qtrue); else return(Qfalse); // int xmlValidateDtd(xmlValidCtxtPtr ctxt, xmlDocPtr doc, xmlDtdPtr dtd) /* int validate(self, ...) xmlDocPtr self PREINIT: xmlValidCtxt cvp; xmlDtdPtr dtd; SV * dtd_sv; STRLEN n_a, len; CODE: LibXML_init_error(); cvp.userData = (void*)PerlIO_stderr(); cvp.error = (xmlValidityErrorFunc)LibXML_validity_error; cvp.warning = (xmlValidityWarningFunc)LibXML_validity_warning; // we need to initialize the node stack, because perl might // already messed it up. // cvp.nodeNr = 0; cvp.nodeTab = NULL; cvp.vstateNr = 0; cvp.vstateTab = NULL; if (items > 1) { dtd_sv = ST(1); if ( sv_isobject(dtd_sv) && (SvTYPE(SvRV(dtd_sv)) == SVt_PVMG) ) { dtd = (xmlDtdPtr)PmmSvNode(dtd_sv); } else { croak("is_valid: argument must be a DTD object"); } RETVAL = xmlValidateDtd(&cvp, self , dtd); } else { RETVAL = xmlValidateDocument(&cvp, self); } sv_2mortal(LibXML_error); if (RETVAL == 0) { LibXML_croak_error(); } OUTPUT: RETVAL */ } // Rdoc needs to know #ifdef RDOC_NEVER_DEFINED mXML = rb_define_module("XML"); #endif void ruby_init_xml_document(void) { cXMLDocument = rb_define_class_under(mXML, "Document", rb_cObject); rb_define_singleton_method(cXMLDocument, "file", ruby_xml_document_new_file, 1); rb_define_singleton_method(cXMLDocument, "new", ruby_xml_document_initialize, -1); //rb_raise(eXMLNodeFailedModify, "unable to add a child to the document"); //eDTDValidityWarning = rb_define_class_under(cXMLNode, "ValidityWarning", rb_eException); //eDTDValidityError = rb_define_class_under(cXMLNode, "ValidityWarning", rb_eException); rb_define_method(cXMLDocument, "[]", ruby_xml_document_property_get, 1); rb_define_method(cXMLDocument, "[]=", ruby_xml_document_property_set, 2); rb_define_method(cXMLDocument, "child", ruby_xml_document_child_get, 0); rb_define_method(cXMLDocument, "child?", ruby_xml_document_child_q, 0); rb_define_method(cXMLDocument, "compression", ruby_xml_document_compression_get, 0); rb_define_method(cXMLDocument, "compression=", ruby_xml_document_compression_set, 1); rb_define_method(cXMLDocument, "compression?", ruby_xml_document_compression_q, 0); rb_define_method(cXMLDocument, "dump", ruby_xml_document_dump, -1); rb_define_method(cXMLDocument, "debug_dump", ruby_xml_document_debug_dump, -1); rb_define_method(cXMLDocument, "debug_dump_head", ruby_xml_document_debug_dump_head, -1); rb_define_method(cXMLDocument, "debug_format_dump", ruby_xml_document_debug_format_dump, -1); rb_define_method(cXMLDocument, "encoding", ruby_xml_document_encoding_get, 0); rb_define_method(cXMLDocument, "encoding=", ruby_xml_document_encoding_set, 1); rb_define_method(cXMLDocument, "filename", ruby_xml_document_filename_get, 0); rb_define_method(cXMLDocument, "find", ruby_xml_document_find, -1); rb_define_method(cXMLDocument, "format_dump", ruby_xml_document_format_dump, -1); rb_define_method(cXMLDocument, "last", ruby_xml_document_last_get, 0); rb_define_method(cXMLDocument, "last?", ruby_xml_document_last_q, 0); rb_define_method(cXMLDocument, "next", ruby_xml_document_next_get, 0); 
rb_define_method(cXMLDocument, "next?", ruby_xml_document_next_q, 0); rb_define_method(cXMLDocument, "parent", ruby_xml_document_parent_get, 0); rb_define_method(cXMLDocument, "parent?", ruby_xml_document_parent_q, 0); rb_define_method(cXMLDocument, "prev", ruby_xml_document_prev_get, 0); rb_define_method(cXMLDocument, "prev?", ruby_xml_document_prev_q, 0); rb_define_method(cXMLDocument, "root", ruby_xml_document_root_get, 0); rb_define_method(cXMLDocument, "root=", ruby_xml_document_root_set, 1); rb_define_method(cXMLDocument, "save", ruby_xml_document_save, -1); rb_define_method(cXMLDocument, "standalone?", ruby_xml_document_standalone_q, 0); rb_define_method(cXMLDocument, "to_s", ruby_xml_document_to_s, -1); rb_define_method(cXMLDocument, "url", ruby_xml_document_url_get, 0); rb_define_method(cXMLDocument, "version", ruby_xml_document_version_get, 0); rb_define_method(cXMLDocument, "xinclude", ruby_xml_document_xinclude, 0); rb_define_method(cXMLDocument, "validate", ruby_xml_document_validate_dtd, 1); rb_define_method(cXMLDocument, "validate_schema", ruby_xml_document_validate_schema, 1); } ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/sax_parser_callbacks.inc0000644000000000000000000001300711672453175027700 0ustar rootroot/* $Id: sax_parser_callbacks.inc,v 1.1 2006/04/14 14:50:58 roscopeco Exp $ */ /* Please see the LICENSE file for copyright and distribution information */ /* * SAX CALLBACK HANDLERS */ static void internal_subset_func(ruby_xml_sax_parser_callbacks *cbp, const char *name, const char *extid, const char *sysid) { VALUE handler = cbp->internalSubset; if (handler && handler != Qnil) { rb_funcall(handler, callsym, 3, rb_str_new2(name), rb_str_new2(extid), rb_str_new2(sysid)); } } static void is_standalone_func(ruby_xml_sax_parser_callbacks *cbp) { VALUE handler = cbp->isStandalone; if (handler && handler != Qnil) { rb_funcall(handler,callsym,0); } } static void has_internal_subset_func(ruby_xml_sax_parser_callbacks *cbp) { VALUE handler = cbp->hasInternalSubset; if (handler && handler != Qnil) { rb_funcall(handler,callsym,0); } } static void has_external_subset_func(ruby_xml_sax_parser_callbacks *cbp) { VALUE handler = cbp->hasExternalSubset; if (handler && handler != Qnil) { rb_funcall(handler,callsym,0); } } static void start_document_func(ruby_xml_sax_parser_callbacks *cbp) { VALUE handler = cbp->startDocument; if (handler && handler != Qnil) { rb_funcall(handler,callsym,0); } } static void end_document_func(ruby_xml_sax_parser_callbacks *cbp) { VALUE handler = cbp->endDocument; if (handler && handler != Qnil) { rb_funcall(handler,callsym,0); } } static void start_element_func(ruby_xml_sax_parser_callbacks *cbp, const char *name, const char **attrs) { VALUE handler = cbp->startElement; VALUE ahsh = rb_hash_new(); const char *attr, *value; if (attrs) { while ((attr = *(attrs++))) { value = *(attrs++); rb_hash_aset(ahsh, rb_str_new2(attr), rb_str_new2(value)); } } if (handler && handler != Qnil) { rb_funcall(handler,callsym,2,rb_str_new2(name),ahsh); } } static void end_element_func(ruby_xml_sax_parser_callbacks *cbp, const char *name) { VALUE handler = cbp->endElement; if (handler && handler != Qnil) { rb_funcall(handler,callsym,1,rb_str_new2(name)); } } static void reference_func(ruby_xml_sax_parser_callbacks *cbp, const char *name) { VALUE handler = cbp->reference; if (handler && handler != Qnil) { rb_funcall(handler,callsym,1,rb_str_new2(name)); } } static void characters_func(ruby_xml_sax_parser_callbacks *cbp, const char *chars, int len) { VALUE 
handler = cbp->characters; if (handler && handler != Qnil) { rb_funcall(handler,callsym,1,rb_str_new(chars, len)); } } static void processing_instruction_func(ruby_xml_sax_parser_callbacks *cbp, const char *target, const char *data) { VALUE handler = cbp->processingInstruction; if (handler && handler != Qnil) { rb_funcall(handler, callsym, 2, rb_str_new2(target),rb_str_new2(data)); } } static void comment_func(ruby_xml_sax_parser_callbacks *cbp, const char *msg) { VALUE handler = cbp->comment; if (handler && handler != Qnil) { rb_funcall(handler,callsym,1,rb_str_new2(msg)); } } // TODO these next three should actually be formatting messages. static void warning_func(ruby_xml_sax_parser_callbacks *cbp, const char *msg, ...) { VALUE handler = cbp->xmlParserWarning; if (handler && handler != Qnil) { rb_funcall(handler,callsym,1,rb_str_new2(msg)); } } static void error_func(ruby_xml_sax_parser_callbacks *cbp, const char *msg, ...) { VALUE handler = cbp->xmlParserError; if (handler && handler != Qnil) { rb_funcall(handler,callsym,1,rb_str_new2(msg)); } } static void fatal_error_func(ruby_xml_sax_parser_callbacks *cbp, const char *msg, ...) { VALUE handler = cbp->xmlParserFatalError; if (handler && handler != Qnil) { rb_funcall(handler,callsym,1,rb_str_new2(msg)); } } static void cdata_block_func(ruby_xml_sax_parser_callbacks *cbp, const char *value, int len) { VALUE handler = cbp->cdataBlock; if (handler && handler != Qnil) { rb_funcall(handler,callsym,1,rb_str_new(value, len)); } } static void external_subset_func(ruby_xml_sax_parser_callbacks *cbp, const char *name, const char *extid, const char *sysid) { VALUE handler = cbp->externalSubset; if (handler && handler != Qnil) { rb_funcall(handler, callsym, 3, rb_str_new2(name), rb_str_new2(extid), rb_str_new2(sysid)); } } static xmlSAXHandler rubySAXHandlerStruct = { (internalSubsetSAXFunc)internal_subset_func, (isStandaloneSAXFunc)is_standalone_func, (hasInternalSubsetSAXFunc)has_internal_subset_func, (hasExternalSubsetSAXFunc)has_external_subset_func, 0, /* resolveEntity */ 0, /* getEntity */ 0, /* entityDecl */ 0, /* notationDecl */ 0, /* attributeDecl */ 0, /* elementDecl */ 0, /* unparsedEntityDecl */ 0, /* setDocumentLocator */ (startDocumentSAXFunc)start_document_func, (endDocumentSAXFunc)end_document_func, (startElementSAXFunc)start_element_func, (endElementSAXFunc)end_element_func, (referenceSAXFunc)reference_func, (charactersSAXFunc)characters_func, 0, /* ignorableWhitespace */ (processingInstructionSAXFunc)processing_instruction_func, (commentSAXFunc)comment_func, (warningSAXFunc)warning_func, (errorSAXFunc)error_func, (fatalErrorSAXFunc)fatal_error_func, 0, /* xmlGetParameterEntity */ (cdataBlockSAXFunc)cdata_block_func, (externalSubsetSAXFunc)external_subset_func, 1 }; ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_node.c0000644000000000000000000013664111672453175026063 0ustar rootroot/* $Id: ruby_xml_node.c,v 1.3 2006/04/12 12:08:39 roscopeco Exp $ */ /* Please see the LICENSE file for copyright and distribution information */ #include "libxml.h" #include "ruby_xml_node.h" VALUE cXMLNode; VALUE eXMLNodeSetNamespace; VALUE eXMLNodeFailedModify; VALUE eXMLNodeUnknownType; /* * call-seq: * node.attribute? => (true|false) * * Determine whether this is an attribute node, */ VALUE ruby_xml_node_attribute_q(VALUE self) { ruby_xml_node *rxn; Data_Get_Struct(self, ruby_xml_node, rxn); if (rxn->node->type == XML_ATTRIBUTE_NODE) return(Qtrue); else return(Qfalse); } /* * call-seq: * node.attribute_decl? 
=> (true|false) * * Determine whether this is an attribute declaration node, */ VALUE ruby_xml_node_attribute_decl_q(VALUE self) { ruby_xml_node *rxn; Data_Get_Struct(self, ruby_xml_node, rxn); if (rxn->node->type == XML_ATTRIBUTE_DECL) return(Qtrue); else return(Qfalse); } /* * call-seq: * node.base => "uri" * * Obtain this node's base URI. */ VALUE ruby_xml_node_base_get(VALUE self) { ruby_xml_node *rxn; Data_Get_Struct(self, ruby_xml_node, rxn); if (rxn->node->doc == NULL) return(Qnil); // TODO some NULL checking, raises ArgumentError in Ruby: // ArgumentError: NULL pointer given return(rb_str_new2((const char*)xmlNodeGetBase(rxn->node->doc, rxn->node))); } // TODO node_base_set should support setting back to nil /* * call-seq: * node.base = "uri" * * Set this node's base URI. */ VALUE ruby_xml_node_base_set(VALUE self, VALUE uri) { ruby_xml_node *node; Check_Type(uri, T_STRING); Data_Get_Struct(self, ruby_xml_node, node); if (node->node->doc == NULL) return(Qnil); xmlNodeSetBase(node->node, (xmlChar*)StringValuePtr(uri)); return(Qtrue); } /* * call-seq: * node.cdata? => (true|false) * * Determine whether this is a #CDATA node */ VALUE ruby_xml_node_cdata_q(VALUE self) { ruby_xml_node *rxn; Data_Get_Struct(self, ruby_xml_node, rxn); if (rxn->node->type == XML_CDATA_SECTION_NODE) return(Qtrue); else return(Qfalse); } /* * call-seq: * node.comment? => (true|false) * * Determine whether this is a comment node */ VALUE ruby_xml_node_comment_q(VALUE self) { ruby_xml_node *rxn; Data_Get_Struct(self, ruby_xml_node, rxn); if (rxn->node->type == XML_COMMENT_NODE) return(Qtrue); else return(Qfalse); } /* * call-seq: * node << ("string" | node) * * Add the specified string or XML::Node to this node's * content. */ VALUE ruby_xml_node_content_add(VALUE self, VALUE obj) { ruby_xml_node *node; VALUE str; Data_Get_Struct(self, ruby_xml_node, node); if (rb_obj_is_kind_of(obj, cXMLNode)) { return(ruby_xml_node_child_set(self, obj)); } else if (TYPE(obj) == T_STRING) { xmlNodeAddContent(node->node, (xmlChar*)StringValuePtr(obj)); return(obj); } else { str = rb_obj_as_string(obj); if (NIL_P(str) || TYPE(str) != T_STRING) rb_raise(rb_eTypeError, "invalid argument: must be string or XML::Node"); xmlNodeAddContent(node->node, (xmlChar*)StringValuePtr(str)); return(obj); } } /* * call-seq: * node.content => "string" * * Obtain this node's content as a string. */ VALUE ruby_xml_node_content_get(VALUE self) { ruby_xml_node *rxn; xmlChar *content; VALUE out; Data_Get_Struct(self, ruby_xml_node, rxn); content = xmlNodeGetContent(rxn->node); out = rb_str_new2((const char *) content); xmlFree(content); return out; } /* * call-seq: * node.content = "string" * * Set this node's content to the specified string. */ VALUE ruby_xml_node_content_set(VALUE self, VALUE content) { ruby_xml_node *node; Check_Type(content, T_STRING); Data_Get_Struct(self, ruby_xml_node, node); xmlNodeSetContent(node->node, (xmlChar*)StringValuePtr(content)); return(Qtrue); } /* * call-seq: * node.content_stripped => "string" * * Obtain this node's stripped content. * * *Deprecated*: Stripped content can be obtained via the * +content+ method. */ VALUE ruby_xml_node_content_stripped_get(VALUE self) { ruby_xml_node *rxn; Data_Get_Struct(self, ruby_xml_node, rxn); if (rxn->node->content == NULL) return(Qnil); else return(rb_str_new2((const char*)xmlNodeGetContent(rxn->node))); } //////////////////////////////////////////////////// // TODO This whole child thing seems to work in some odd ways. 
// Try setting child= to a node with multiple children, // then get it back through child= . /* * call-seq: * node.child => node * * Obtain this node's first child node, if any. */ VALUE ruby_xml_node_child_get(VALUE self) { ruby_xml_node *node; xmlNodePtr tmp; Data_Get_Struct(self, ruby_xml_node, node); switch (node->node->type) { case XML_ELEMENT_NODE: case XML_ENTITY_REF_NODE: case XML_ENTITY_NODE: case XML_PI_NODE: case XML_COMMENT_NODE: case XML_DOCUMENT_NODE: #ifdef LIBXML_DOCB_ENABLED case XML_DOCB_DOCUMENT_NODE: #endif case XML_HTML_DOCUMENT_NODE: case XML_DTD_NODE: tmp = node->node->children; break; case XML_ATTRIBUTE_NODE: { xmlAttrPtr attr = (xmlAttrPtr) node->node; tmp = attr->children; break; } default: tmp = NULL; break; } if (tmp == NULL) return(Qnil); else return(ruby_xml_node_new2(cXMLNode, node->xd, tmp)); } /* * call-seq: * node.child? => (true|false) * * Determine whether this node has at least one child. */ VALUE ruby_xml_node_child_q(VALUE self) { ruby_xml_node *rxn; xmlNodePtr node; Data_Get_Struct(self, ruby_xml_node, rxn); node = NULL; switch (rxn->node->type) { case XML_ELEMENT_NODE: case XML_ENTITY_REF_NODE: case XML_ENTITY_NODE: case XML_PI_NODE: case XML_COMMENT_NODE: case XML_DOCUMENT_NODE: #ifdef LIBXML_DOCB_ENABLED case XML_DOCB_DOCUMENT_NODE: #endif case XML_HTML_DOCUMENT_NODE: case XML_DTD_NODE: node = rxn->node->children; break; case XML_ATTRIBUTE_NODE: { xmlAttrPtr attr = (xmlAttrPtr) rxn->node; node = attr->children; break; } default: node = NULL; } if (node == NULL) return(Qfalse); else return(Qtrue); } /* * call-seq: * node.child = node * * Set a child node for this node. */ VALUE ruby_xml_node_child_set(VALUE self, VALUE rnode) { ruby_xml_node *cnode, *pnode; xmlNodePtr ret; if (rb_obj_is_kind_of(rnode, cXMLNode) == Qfalse) rb_raise(rb_eTypeError, "Must pass an XML::Node object"); Data_Get_Struct(self, ruby_xml_node, pnode); Data_Get_Struct(rnode, ruby_xml_node, cnode); ret = xmlAddChild(pnode->node, cnode->node); if (ret == NULL) rb_raise(eXMLNodeFailedModify, "unable to add a child to the document"); ruby_xml_node_set_ptr(rnode, 1); return(ruby_xml_node_new2(cXMLNode, pnode->xd, ret)); } //////////////////////////////////////////////// // TODO new Documents seem to be created quite readily... /* * call-seq: * node.doc => document * * Obtain the XML::Document this node belongs to. */ VALUE ruby_xml_node_doc(VALUE self) { ruby_xml_document *rxd; ruby_xml_node *rxn; xmlDocPtr doc; VALUE docobj; Data_Get_Struct(self, ruby_xml_node, rxn); switch (rxn->node->type) { case XML_DOCUMENT_NODE: #ifdef LIBXML_DOCB_ENABLED case XML_DOCB_DOCUMENT_NODE: #endif case XML_HTML_DOCUMENT_NODE: doc = NULL; break; case XML_ATTRIBUTE_NODE: { xmlAttrPtr attr = (xmlAttrPtr) rxn->node; doc = attr->doc; break; } case XML_NAMESPACE_DECL: doc = NULL; break; default: doc = rxn->node->doc; break; } if (doc == NULL) return(Qnil); docobj = ruby_xml_document_new(cXMLDocument, doc); Data_Get_Struct(docobj, ruby_xml_document, rxd); rxd->is_ptr = 1; return(docobj); } /* * call-seq: * node.docbook? => (true|false) * * Determine whether this is a docbook node. */ VALUE ruby_xml_node_docbook_doc_q(VALUE self) { #ifdef LIBXML_DOCB_ENABLED ruby_xml_node *rxn; Data_Get_Struct(self, ruby_xml_node, rxn); if (rxn->node->type == XML_DOCB_DOCUMENT_NODE) return(Qtrue); else return(Qfalse); #else rb_warn("libxml compiled without docbook support"); return(Qfalse); #endif } /* * call-seq: * node.doctype? => (true|false) * * Determine whether this is a DOCTYPE node. 
*/ VALUE ruby_xml_node_doctype_q(VALUE self) { ruby_xml_node *rxn; Data_Get_Struct(self, ruby_xml_node, rxn); if (rxn->node->type == XML_DOCUMENT_TYPE_NODE) return(Qtrue); else return(Qfalse); } /* * call-seq: * node.document? => (true|false) * * Determine whether this is a document node. */ VALUE ruby_xml_node_document_q(VALUE self) { ruby_xml_node *rxn; Data_Get_Struct(self, ruby_xml_node, rxn); if (rxn->node->type == XML_DOCUMENT_NODE) return(Qtrue); else return(Qfalse); } /* * call-seq: * node.dtd? => (true|false) * * Determine whether this is a DTD node. */ VALUE ruby_xml_node_dtd_q(VALUE self) { ruby_xml_node *rxn; Data_Get_Struct(self, ruby_xml_node, rxn); if (rxn->node->type == XML_DTD_NODE) return(Qtrue); else return(Qfalse); } /* * call-seq: * node.dump => (true|nil) * * Dump this node to stdout. */ VALUE ruby_xml_node_dump(VALUE self) { ruby_xml_node *rxn; xmlBufferPtr buf; Data_Get_Struct(self, ruby_xml_node, rxn); if (rxn->node->doc == NULL) return(Qnil); buf = xmlBufferCreate(); xmlNodeDump(buf, rxn->node->doc, rxn->node, 0, 1); xmlBufferDump(stdout, buf); xmlBufferFree(buf); return(Qtrue); } /* * call-seq: * node.debug_dump => (true|nil) * * Dump this node to stdout, including any debugging * information. */ VALUE ruby_xml_node_debug_dump(VALUE self) { ruby_xml_node *rxn; Data_Get_Struct(self, ruby_xml_node, rxn); if (rxn->node->doc == NULL) return(Qnil); xmlElemDump(stdout, rxn->node->doc, rxn->node); return(Qtrue); } /* * call-seq: * node.element? => (true|false) * * Determine whether this is an element node. */ VALUE ruby_xml_node_element_q(VALUE self) { ruby_xml_node *rxn; Data_Get_Struct(self, ruby_xml_node, rxn); if (rxn->node->type == XML_ELEMENT_NODE) return(Qtrue); else return(Qfalse); } /* * call-seq: * node.element_decl? => (true|false) * * Determine whether this is an element declaration node. */ VALUE ruby_xml_node_element_decl_q(VALUE self) { ruby_xml_node *rxn; Data_Get_Struct(self, ruby_xml_node, rxn); if (rxn->node->type == XML_ELEMENT_DECL) return(Qtrue); else return(Qfalse); } /* * call-seq: * node.empty? => (true|false) * * Determine whether this node is empty. */ VALUE ruby_xml_node_empty_q(VALUE self) { ruby_xml_node *rxn; Data_Get_Struct(self, ruby_xml_node, rxn); if (rxn->node == NULL) return(Qnil); return((xmlIsBlankNode(rxn->node) == 1) ? Qtrue : Qfalse); } /* * call-seq: * node.entity? => (true|false) * * Determine whether this is an entity node. */ VALUE ruby_xml_node_entity_q(VALUE self) { ruby_xml_node *rxn; Data_Get_Struct(self, ruby_xml_node, rxn); if (rxn->node->type == XML_ENTITY_NODE) return(Qtrue); else return(Qfalse); } /* * call-seq: * node.entity_ref? => (true|false) * * Determine whether this is an entity reference node. */ VALUE ruby_xml_node_entity_ref_q(VALUE self) { ruby_xml_node *rxn; Data_Get_Struct(self, ruby_xml_node, rxn); if (rxn->node->type == XML_ENTITY_REF_NODE) return(Qtrue); else return(Qfalse); } VALUE ruby_xml_node_to_s(VALUE self); /* * call-seq: * node.eql?(other_node) => (true|false) * * Test equality between the two nodes. Equality is determined based * on the XML representation of the nodes. 
*/ VALUE ruby_xml_node_eql_q(VALUE self, VALUE other) { // TODO this isn't the best way to handle this ruby_xml_node *rxn, *orxn; VALUE thisxml, otherxml; Data_Get_Struct(self, ruby_xml_node, rxn); Data_Get_Struct(other, ruby_xml_node, orxn); thisxml = ruby_xml_node_to_s(self); otherxml = ruby_xml_node_to_s(other); return(rb_funcall(thisxml, rb_intern("=="), 1, otherxml)); } /* * call-seq: * node.find(xpath_expr, namespace = [any]) => nodeset * * Find nodes matching the specified xpath expression, optionally * using the specified namespaces. Returns an XML::Node::Set. */ VALUE ruby_xml_node_find(int argc, VALUE *argv, VALUE self) { int i, vargc; VALUE *vargv; if (argc > 2 || argc < 1) rb_raise(rb_eArgError, "wrong number of arguments (need 1 or 2)"); vargc = argc + 1; vargv = ALLOC_N(VALUE, vargc + 1); vargv[0] = self; for (i = 0; i nodeset * * Find the first node matching the specified xpath expression, optionally * using the specified namespaces. Returns an XML::Node. */ VALUE ruby_xml_node_find_first(int argc, VALUE *argv, VALUE self) { VALUE ns = ruby_xml_node_find(argc, argv, self); ruby_xml_node_set *rxnset; Data_Get_Struct(ns, ruby_xml_node_set, rxnset); if (rxnset->node_set == NULL || rxnset->node_set->nodeNr < 1) return(Qnil); VALUE nodeobj; switch(rxnset->node_set->nodeTab[0]->type) { case XML_ATTRIBUTE_NODE: nodeobj = ruby_xml_attr_new2(cXMLAttr, rxnset->xd, (xmlAttrPtr)rxnset->node_set->nodeTab[0]); break; default: nodeobj = ruby_xml_node_new2(cXMLNode, rxnset->xd, rxnset->node_set->nodeTab[0]); } return(nodeobj); } /* * call-seq: * node.fragment? => (true|false) * * Determine whether this node is a fragment. */ VALUE ruby_xml_node_fragment_q(VALUE self) { ruby_xml_node *rxn; Data_Get_Struct(self, ruby_xml_node, rxn); if (rxn->node->type == XML_DOCUMENT_FRAG_NODE) return(Qtrue); else return(Qfalse); } void ruby_xml_node_free(ruby_xml_node *rxn) { if (rxn->node != NULL && !rxn->is_ptr) { xmlUnlinkNode(rxn->node); xmlFreeNode(rxn->node); rxn->node = NULL; } free(rxn); } /* * call-seq: * node.hash => fixnum * * Returns the hash-code for this node. This is the hash of the XML * representation in order to be consistent with eql. */ VALUE ruby_xml_node_hash(VALUE self) { ruby_xml_node *rxn; VALUE thisxml; Data_Get_Struct(self, ruby_xml_node, rxn); thisxml = ruby_xml_node_to_s(self); return(rb_funcall(thisxml, rb_intern("hash"), 0)); } /* * call-seq: * node.html_doc? => (true|false) * * Determine whether this node is an html document node. */ VALUE ruby_xml_node_html_doc_q(VALUE self) { ruby_xml_node *rxn; Data_Get_Struct(self, ruby_xml_node, rxn); if (rxn->node->type == XML_HTML_DOCUMENT_NODE) return(Qtrue); else return(Qfalse); } /* * call-seq: * XML::Node.new(name, content = nil) => node * * Create a new node with the specified name, optionally setting * the node's content. */ VALUE ruby_xml_node_initialize(int argc, VALUE *argv, VALUE class) { ruby_xml_node *rxn; VALUE name, node, str; str = Qnil; switch(argc) { case 2: switch (TYPE(str)) { case T_STRING: str = argv[1]; break; default: str = rb_obj_as_string(argv[1]); if (NIL_P(str)) Check_Type(str, T_STRING); break; } /* Intentionally fall through to case 1: as a way of setting up * the object. Sneaky, but effective. Probably should use a goto * instead. 
*/ case 1: name = argv[0]; Check_Type(name, T_STRING); node = ruby_xml_node_new(class, NULL); Data_Get_Struct(node, ruby_xml_node, rxn); rxn->node = xmlNewNode(NULL, (xmlChar*)StringValuePtr(name)); if (rxn->node == NULL) return(Qnil); if (!NIL_P(str)) ruby_xml_node_content_set(node, str); break; default: rb_raise(rb_eArgError, "wrong number of arguments (1 or 2)"); } return(node); } /* * call-seq: * node.lang => "string" * * Obtain the language set for this node, if any. * This is set in XML via the xml:lang attribute. */ VALUE ruby_xml_node_lang_get(VALUE self) { ruby_xml_node *rxn; xmlChar *lang; Data_Get_Struct(self, ruby_xml_node, rxn); lang = xmlNodeGetLang(rxn->node); if (lang == NULL) return(Qnil); else return(rb_str_new2((const char*)lang)); } // TODO node_lang_set should support setting back to nil /* * call-seq: * node.lang = "string" * * Set the language for this node. This affects the value * of the xml:lang attribute. */ VALUE ruby_xml_node_lang_set(VALUE self, VALUE lang) { ruby_xml_node *node; Check_Type(lang, T_STRING); Data_Get_Struct(self, ruby_xml_node, node); xmlNodeSetLang(node->node, (xmlChar*)StringValuePtr(lang)); return(Qtrue); } /* * call-seq: * node.last => node * * Obtain the last child node of this node, if any. */ VALUE ruby_xml_node_last_get(VALUE self) { ruby_xml_node *rxn; xmlNodePtr node; Data_Get_Struct(self, ruby_xml_node, rxn); switch (rxn->node->type) { case XML_ELEMENT_NODE: case XML_ENTITY_REF_NODE: case XML_ENTITY_NODE: case XML_PI_NODE: case XML_COMMENT_NODE: case XML_DOCUMENT_NODE: #ifdef LIBXML_DOCB_ENABLED case XML_DOCB_DOCUMENT_NODE: #endif case XML_HTML_DOCUMENT_NODE: case XML_DTD_NODE: node = rxn->node->last; break; case XML_ATTRIBUTE_NODE: { xmlAttrPtr attr = (xmlAttrPtr) rxn->node; node = attr->last; } default: node = NULL; break; } if (node == NULL) return(Qnil); else return(ruby_xml_node_new2(cXMLNode, rxn->xd, node)); } /* * call-seq: * node.last? => (true|false) * * Determine whether this node has a last child node. */ VALUE ruby_xml_node_last_q(VALUE self) { ruby_xml_node *rxn; xmlNodePtr node; Data_Get_Struct(self, ruby_xml_node, rxn); switch (rxn->node->type) { case XML_ELEMENT_NODE: case XML_ENTITY_REF_NODE: case XML_ENTITY_NODE: case XML_PI_NODE: case XML_COMMENT_NODE: case XML_DOCUMENT_NODE: #ifdef LIBXML_DOCB_ENABLED case XML_DOCB_DOCUMENT_NODE: #endif case XML_HTML_DOCUMENT_NODE: case XML_DTD_NODE: node = rxn->node->last; break; case XML_ATTRIBUTE_NODE: { xmlAttrPtr attr = (xmlAttrPtr) rxn->node; node = attr->last; } default: node = NULL; break; } if (node == NULL) return(Qfalse); else return(Qtrue); } /* * call-seq: * node.line_num => num * * Obtain the line number (in the XML document) that this * node was read from. If +default_line_numbers+ is set * false (the default), this method returns zero. */ VALUE ruby_xml_node_line_num(VALUE self) { ruby_xml_node *rxn; long line_num; Data_Get_Struct(self, ruby_xml_node, rxn); if (!xmlLineNumbersDefaultValue) rb_warn("Line numbers were not retained: use XML::Parser::default_line_numbers=true"); line_num = xmlGetLineNo(rxn->node); if (line_num == -1) return(Qnil); else return(INT2NUM((long)line_num)); } /* * call-seq: * node.xlink? => (true|false) * * Determine whether this node is an xlink node. 
*/ VALUE ruby_xml_node_xlink_q(VALUE self) { ruby_xml_node *node; ruby_xml_document *doc; xlinkType xlt; Data_Get_Struct(self, ruby_xml_node, node); Data_Get_Struct(node->xd, ruby_xml_document, doc); xlt = xlinkIsLink(doc->doc, node->node); if (xlt == XLINK_TYPE_NONE) return(Qfalse); else return(Qtrue); } /* * call-seq: * node.xlink_type => num * * Obtain the type identifier for this xlink, if applicable. * If this is not an xlink node (see +xlink?+), will return * nil. */ VALUE ruby_xml_node_xlink_type(VALUE self) { ruby_xml_node *node; ruby_xml_document *doc; xlinkType xlt; Data_Get_Struct(self, ruby_xml_node, node); Data_Get_Struct(node->xd, ruby_xml_document, doc); xlt = xlinkIsLink(doc->doc, node->node); if (xlt == XLINK_TYPE_NONE) return(Qnil); else return(INT2NUM(xlt)); } /* * call-seq: * node.xlink_type_name => "string" * * Obtain the type name for this xlink, if applicable. * If this is not an xlink node (see +xlink?+), will return * nil. */ VALUE ruby_xml_node_xlink_type_name(VALUE self) { ruby_xml_node *node; ruby_xml_document *doc; xlinkType xlt; Data_Get_Struct(self, ruby_xml_node, node); Data_Get_Struct(node->xd, ruby_xml_document, doc); xlt = xlinkIsLink(doc->doc, node->node); switch(xlt) { case XLINK_TYPE_NONE: return(Qnil); case XLINK_TYPE_SIMPLE: return(rb_str_new2("simple")); case XLINK_TYPE_EXTENDED: return(rb_str_new2("extended")); case XLINK_TYPE_EXTENDED_SET: return(rb_str_new2("extended_set")); default: rb_fatal("Unknowng xlink type, %d", xlt); } } static void ruby_xml_node_mark(ruby_xml_node *rxn) { if (rxn == NULL) return; if (!NIL_P(rxn->xd)) rb_gc_mark(rxn->xd); } /* * call-seq: * node.name => "string" * * Obtain this node's name. */ VALUE ruby_xml_node_name_get(VALUE self) { ruby_xml_node *rxn; const xmlChar *name; Data_Get_Struct(self, ruby_xml_node, rxn); switch (rxn->node->type) { case XML_DOCUMENT_NODE: #ifdef LIBXML_DOCB_ENABLED case XML_DOCB_DOCUMENT_NODE: #endif case XML_HTML_DOCUMENT_NODE: { xmlDocPtr doc = (xmlDocPtr) rxn->node; name = doc->URL; break; } case XML_ATTRIBUTE_NODE: { xmlAttrPtr attr = (xmlAttrPtr) rxn->node; name = attr->name; break; } case XML_NAMESPACE_DECL: { xmlNsPtr ns = (xmlNsPtr) rxn->node; name = ns->prefix; break; } default: name = rxn->node->name; break; } if (rxn->node->name == NULL) return(Qnil); else return(rb_str_new2((const char*)name)); } /* * call-seq: * node.name = "string" * * Set this node's name. */ VALUE ruby_xml_node_name_set(VALUE self, VALUE name) { ruby_xml_node *node; Check_Type(name, T_STRING); Data_Get_Struct(self, ruby_xml_node, node); xmlNodeSetName(node->node, (xmlChar*)StringValuePtr(name)); return(Qtrue); } /* * call-seq: * node.namespace => [namespace, ..., namespace] * * Obtain an array of +XML::NS+ objects representing * this node's xmlns attributes */ VALUE ruby_xml_node_namespace_get(VALUE self) { ruby_xml_node *node; xmlNsPtr *nsList, *cur; VALUE arr, ns; Data_Get_Struct(self, ruby_xml_node, node); if (node->node == NULL) return(Qnil); nsList = xmlGetNsList(node->node->doc, node->node); if (nsList == NULL) return(Qnil); arr = rb_ary_new(); for (cur = nsList; *cur != NULL; cur++) { ns = ruby_xml_ns_new2(cXMLNS, node->xd, *cur); if (ns == Qnil) continue; else rb_ary_push(arr, ns); } xmlFree(nsList); return(arr); } /* * call-seq: * node.namespace_node => namespace. * * Obtain this node's namespace node. 
*/ VALUE ruby_xml_node_namespace_get_node(VALUE self) { ruby_xml_node *node; Data_Get_Struct(self, ruby_xml_node, node); if (node->node->ns == NULL) return(Qnil); else return(ruby_xml_ns_new2(cXMLNS, node->xd, node->node->ns)); } // TODO namespace_set can take varargs (in fact, must if used // with strings), but I cannot see how you can call // that version, apart from with 'send' // // Would sure be nice to support foo.namespace['foo'] = 'bar' // but maybe that's not practical... /* * call-seq: * node.namespace = namespace * * Add the specified XML::NS object to this node's xmlns attributes. */ VALUE ruby_xml_node_namespace_set(int argc, VALUE *argv, VALUE self) { VALUE rns, rprefix; ruby_xml_node *rxn; ruby_xml_ns *rxns; xmlNsPtr ns; char *cp, *href; Data_Get_Struct(self, ruby_xml_node, rxn); switch (argc) { case 1: rns = argv[0]; if (TYPE(rns) == T_STRING) { cp = strchr(StringValuePtr(rns), (int)':'); if (cp == NULL) { rprefix = rns; href = NULL; } else { rprefix = rb_str_new(StringValuePtr(rns), (int)((long)cp - (long)StringValuePtr(rns))); href = &cp[1]; /* skip the : */ } } else if (rb_obj_is_kind_of(rns, cXMLNS) == Qtrue) { Data_Get_Struct(self, ruby_xml_ns, rxns); xmlSetNs(rxn->node, rxns->ns); return(rns); } else rb_raise(rb_eTypeError, "must pass a string or an XML::Ns object"); /* Fall through to next case because when argc == 1, we need to * manually setup the additional args unless the arg passed is of * cXMLNS type */ case 2: /* Don't want this code run in the fall through case */ if (argc == 2) { rprefix = argv[0]; href = StringValuePtr(argv[1]); } ns = xmlNewNs(rxn->node, (xmlChar*)href, (xmlChar*)StringValuePtr(rprefix)); if (ns == NULL) rb_raise(eXMLNodeSetNamespace, "unable to set the namespace"); else return(ruby_xml_ns_new2(cXMLNS, rxn->xd, ns)); break; default: rb_raise(rb_eArgError, "wrong number of arguments (1 or 2)"); } /* can't get here */ return(Qnil); } /* * call-seq: * node.namespace? => (true|false) * * Determine whether this node *is* (not has) a namespace * node. */ VALUE ruby_xml_node_namespace_q(VALUE self) { ruby_xml_node *rxn; Data_Get_Struct(self, ruby_xml_node, rxn); if (rxn->node->type == XML_NAMESPACE_DECL) return(Qtrue); else return(Qfalse); } VALUE ruby_xml_node_new(VALUE class, xmlNodePtr node) { ruby_xml_node *rxn; rxn = ALLOC(ruby_xml_node); rxn->is_ptr = 0; rxn->node = node; rxn->xd = Qnil; return(Data_Wrap_Struct(class, ruby_xml_node_mark, ruby_xml_node_free, rxn)); } VALUE ruby_xml_node_new2(VALUE class, VALUE xd, xmlNodePtr node) { ruby_xml_node *rxn; rxn = ALLOC(ruby_xml_node); rxn->is_ptr = 1; rxn->node = node; if (NIL_P(xd)) rxn->xd = Qnil; else rxn->xd = xd; return(Data_Wrap_Struct(class, ruby_xml_node_mark, ruby_xml_node_free, rxn)); } /* * call-seq: * node.next => node * * Obtain the next sibling node, if any. */ VALUE ruby_xml_node_next_get(VALUE self) { ruby_xml_node *rxn; xmlNodePtr node; Data_Get_Struct(self, ruby_xml_node, rxn); switch (rxn->node->type) { case XML_DOCUMENT_NODE: #ifdef LIBXML_DOCB_ENABLED case XML_DOCB_DOCUMENT_NODE: #endif case XML_HTML_DOCUMENT_NODE: node = NULL; break; case XML_ATTRIBUTE_NODE: { xmlAttrPtr attr = (xmlAttrPtr) rxn->node; node = (xmlNodePtr) attr->next; break; } case XML_NAMESPACE_DECL: { xmlNsPtr ns = (xmlNsPtr) rxn->node; node = (xmlNodePtr) ns->next; break; } default: node = rxn->node->next; break; } if (node == NULL) return(Qnil); else return(ruby_xml_node_new2(cXMLNode, rxn->xd, node)); } /* * call-seq: * node.next? => (true|false) * * Determine whether this node has a next sibling. 
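 *
 * Typical use together with +next+ (sketch):
 *
 *   node = node.next while node.next?   # walk forward to the last sibling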
*/ VALUE ruby_xml_node_next_q(VALUE self) { ruby_xml_node *rxn; xmlNodePtr node; Data_Get_Struct(self, ruby_xml_node, rxn); switch (rxn->node->type) { case XML_DOCUMENT_NODE: #ifdef LIBXML_DOCB_ENABLED case XML_DOCB_DOCUMENT_NODE: #endif case XML_HTML_DOCUMENT_NODE: node = NULL; break; case XML_ATTRIBUTE_NODE: { xmlAttrPtr attr = (xmlAttrPtr) rxn->node; node = (xmlNodePtr) attr->next; break; } case XML_NAMESPACE_DECL: { xmlNsPtr ns = (xmlNsPtr) rxn->node; node = (xmlNodePtr) ns->next; break; } default: node = rxn->node->next; break; } if (node == NULL) return(Qfalse); else return(Qtrue); } /* * call-seq: * node.notation? => (true|false) * * Determine whether this is a notation node */ VALUE ruby_xml_node_notation_q(VALUE self) { ruby_xml_node *rxn; Data_Get_Struct(self, ruby_xml_node, rxn); if (rxn->node->type == XML_NOTATION_NODE) return(Qtrue); else return(Qfalse); } /* * call-seq: * node.ns? => (true|false) * * Determine whether this node is a namespace node. */ VALUE ruby_xml_node_ns_q(VALUE self) { ruby_xml_node *rxn; Data_Get_Struct(self, ruby_xml_node, rxn); if (rxn->node->ns == NULL) return(Qfalse); else return(Qtrue); } /* * call-seq: * node.ns_def => namespace * * Obtain this node's default namespace. */ VALUE ruby_xml_node_ns_def_get(VALUE self) { ruby_xml_node *rxn; Data_Get_Struct(self, ruby_xml_node, rxn); if (rxn->node->nsDef == NULL) return(Qnil); else return(ruby_xml_ns_new2(cXMLNS, rxn->xd, rxn->node->nsDef)); } /* * call-seq: * node.ns_def? => (true|false) * * Obtain an array of +XML::NS+ objects representing * this node's xmlns attributes */ VALUE ruby_xml_node_ns_def_q(VALUE self) { ruby_xml_node *rxn; Data_Get_Struct(self, ruby_xml_node, rxn); if (rxn->node->nsDef == NULL) return(Qfalse); else return(Qtrue); } /* * call-seq: * node.parent => node * * Obtain this node's parent node, if any. */ VALUE ruby_xml_node_parent_get(VALUE self) { ruby_xml_node *rxn; xmlNodePtr node; Data_Get_Struct(self, ruby_xml_node, rxn); switch (rxn->node->type) { case XML_DOCUMENT_NODE: case XML_HTML_DOCUMENT_NODE: #ifdef LIBXML_DOCB_ENABLED case XML_DOCB_DOCUMENT_NODE: #endif node = NULL; break; case XML_ATTRIBUTE_NODE: { xmlAttrPtr attr = (xmlAttrPtr) rxn->node; node = attr->parent; } case XML_ENTITY_DECL: case XML_NAMESPACE_DECL: case XML_XINCLUDE_START: case XML_XINCLUDE_END: node = NULL; break; default: node = rxn->node->parent; break; } if (node == NULL) return(Qnil); else return(ruby_xml_node_new2(cXMLNode, rxn->xd, node)); } /* * call-seq: * node.parent? => (true|false) * * Determine whether this node has a parent node. */ VALUE ruby_xml_node_parent_q(VALUE self) { ruby_xml_node *rxn; xmlNodePtr node; Data_Get_Struct(self, ruby_xml_node, rxn); switch (rxn->node->type) { case XML_DOCUMENT_NODE: case XML_HTML_DOCUMENT_NODE: #ifdef LIBXML_DOCB_ENABLED case XML_DOCB_DOCUMENT_NODE: #endif node = NULL; break; case XML_ATTRIBUTE_NODE: { xmlAttrPtr attr = (xmlAttrPtr) rxn->node; node = attr->parent; } case XML_ENTITY_DECL: case XML_NAMESPACE_DECL: case XML_XINCLUDE_START: case XML_XINCLUDE_END: node = NULL; break; default: node = rxn->node->parent; break; } if (node == NULL) return(Qfalse); else return(Qtrue); } /* * call-seq: * node.path => path * * Obtain this node's path. */ VALUE ruby_xml_node_path(VALUE self) { ruby_xml_node *rxn; xmlChar *path; Data_Get_Struct(self, ruby_xml_node, rxn); path = xmlGetNodePath(rxn->node); if (path == NULL) return(Qnil); else return(rb_str_new2((const char*)path)); } /* * call-seq: * node.pi? 
=> (true|false) * * Determine whether this is a processing instruction node. */ VALUE ruby_xml_node_pi_q(VALUE self) { ruby_xml_node *rxn; Data_Get_Struct(self, ruby_xml_node, rxn); if (rxn->node->type == XML_PI_NODE) return(Qtrue); else return(Qfalse); } /* * call-seq: * node.pointer => node_set * * Evaluates an XPointer expression relative to this node. */ VALUE ruby_xml_node_pointer(VALUE self, VALUE xptr_str) { return(ruby_xml_xpointer_point2(self, xptr_str)); } /* * call-seq: * node.prev => node * * Obtain the previous sibling, if any. */ VALUE ruby_xml_node_prev_get(VALUE self) { ruby_xml_node *rxn; xmlNodePtr node; Data_Get_Struct(self, ruby_xml_node, rxn); switch (rxn->node->type) { case XML_DOCUMENT_NODE: #ifdef LIBXML_DOCB_ENABLED case XML_DOCB_DOCUMENT_NODE: #endif case XML_HTML_DOCUMENT_NODE: case XML_NAMESPACE_DECL: node = NULL; break; case XML_ATTRIBUTE_NODE: { xmlAttrPtr attr = (xmlAttrPtr) rxn->node; node = (xmlNodePtr) attr->next; } break; default: node = rxn->node->next; break; } if (node == NULL) return(Qnil); else return(ruby_xml_node_new2(cXMLNode, rxn->xd, node)); } /* * call-seq: * node.prev? => (true|false) * * Determines whether this node has a previous sibling node. */ VALUE ruby_xml_node_prev_q(VALUE self) { ruby_xml_node *rxn; xmlNodePtr node; Data_Get_Struct(self, ruby_xml_node, rxn); switch (rxn->node->type) { case XML_DOCUMENT_NODE: #ifdef LIBXML_DOCB_ENABLED case XML_DOCB_DOCUMENT_NODE: #endif case XML_HTML_DOCUMENT_NODE: case XML_NAMESPACE_DECL: node = NULL; break; case XML_ATTRIBUTE_NODE: { xmlAttrPtr attr = (xmlAttrPtr) rxn->node; node = (xmlNodePtr) attr->next; } break; default: node = rxn->node->next; break; } if (node == NULL) return(Qfalse); else return(Qtrue); } /* * call-seq: * node.property("name") => "string" * node["name"] => "string" * * Obtain the named property. */ VALUE ruby_xml_node_property_get(VALUE self, VALUE prop) { ruby_xml_node *rxn; xmlChar *p; VALUE r; Check_Type(prop, T_STRING); Data_Get_Struct(self, ruby_xml_node, rxn); p = xmlGetProp(rxn->node, (xmlChar*)StringValuePtr(prop)); if (p == NULL) r = Qnil; else { r = rb_str_new2((const char*)p); xmlFree(p); } return r; } /* * call-seq: * node["name"] = "string" * * Set the named property. */ VALUE ruby_xml_node_property_set(VALUE self, VALUE key, VALUE val) { ruby_xml_node *node; ruby_xml_attr *rxa; xmlAttrPtr attr; VALUE rattr; Data_Get_Struct(self, ruby_xml_node, node); Check_Type(key, T_STRING); if( val == Qnil ) { attr = xmlSetProp(node->node, (xmlChar*)StringValuePtr(key), NULL); xmlRemoveProp( attr ); return Qnil; } else { Check_Type(val, T_STRING); } attr = xmlSetProp(node->node, (xmlChar*)StringValuePtr(key), (xmlChar*)StringValuePtr(val)); if (attr == NULL) { attr = xmlNewProp(node->node, (xmlChar*)StringValuePtr(key), (xmlChar*)StringValuePtr(val)); if (attr == NULL) return(Qnil); } rattr = ruby_xml_attr_new(cXMLAttr, node->xd, attr); Data_Get_Struct(rattr, ruby_xml_attr, rxa); rxa->is_ptr = 1; return(rattr); } /* * call-seq: * node.properties => attributes * * Returns the +XML::Attr+ for this node. */ VALUE ruby_xml_node_properties_get(VALUE self) { ruby_xml_node *node; xmlAttrPtr attr; Data_Get_Struct(self, ruby_xml_node, node); if (node->node->type == XML_ELEMENT_NODE) { attr = node->node->properties; return(ruby_xml_attr_new2(cXMLAttr, node->xd, attr)); } else { return(Qnil); } } /* * call-seq: * node.properties? => (true|false) * * Determine whether this node has properties * (attributes). 
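 *
 * Sketch (the attribute name "id" is only an example):
 *
 *   node["id"] if node.properties?   # read an attribute only when some exist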
*/ VALUE ruby_xml_node_properties_q(VALUE self) { ruby_xml_node *rxn; Data_Get_Struct(self, ruby_xml_node, rxn); if (rxn->node->type == XML_ELEMENT_NODE && rxn->node->properties != NULL) return(Qtrue); else return(Qfalse); } /* * call-seq: * node.remove! => nil * * Removes this node from it's parent. */ VALUE ruby_xml_node_remove_ex(VALUE self) { ruby_xml_node *rxn; Data_Get_Struct(self, ruby_xml_node, rxn); xmlUnlinkNode(rxn->node); return(Qnil); } /* * call-seq: * node.search_href => namespace * * Search for a namespace by href. */ VALUE ruby_xml_node_search_href(VALUE self, VALUE href) { ruby_xml_document *doc; ruby_xml_node *node; Check_Type(href, T_STRING); Data_Get_Struct(self, ruby_xml_node, node); Data_Get_Struct(node->xd, ruby_xml_document, doc); return(ruby_xml_ns_new2(cXMLNS, node->xd, xmlSearchNsByHref(doc->doc, node->node, (xmlChar*)StringValuePtr(href)))); } /* * call-seq: * node.search_ns => namespace * * Search for a namespace by namespace. */ VALUE ruby_xml_node_search_ns(VALUE self, VALUE ns) { ruby_xml_document *doc; ruby_xml_node *node; Check_Type(ns, T_STRING); Data_Get_Struct(self, ruby_xml_node, node); Data_Get_Struct(node->xd, ruby_xml_document, doc); return(ruby_xml_ns_new2(cXMLNS, node->xd, xmlSearchNs(doc->doc, node->node, (xmlChar*)StringValuePtr(ns)))); } VALUE ruby_xml_node_set_ptr(VALUE node, int is_ptr) { ruby_xml_node *rxn; Data_Get_Struct(node, ruby_xml_node, rxn); rxn->is_ptr = is_ptr; return(Qtrue); } /* * call-seq: * node.sibling(node) => node * * Add the specified node as a sibling of this node. */ VALUE ruby_xml_node_sibling_set(VALUE self, VALUE rnode) { ruby_xml_node *cnode, *pnode; xmlNodePtr ret; if (rb_obj_is_kind_of(rnode, cXMLNode) == Qfalse) rb_raise(rb_eTypeError, "Must pass an XML::Node object"); Data_Get_Struct(self, ruby_xml_node, pnode); Data_Get_Struct(rnode, ruby_xml_node, cnode); ret = xmlAddSibling(pnode->node, cnode->node); if (ret == NULL) rb_raise(eXMLNodeFailedModify, "unable to add a sibling to the document"); cnode->is_ptr = 1; return(ruby_xml_node_new2(cXMLNode, pnode->xd, ret)); } /* * call-seq: * node.space_preserve => (true|false) * * Determine whether this node preserves whitespace. */ VALUE ruby_xml_node_space_preserve_get(VALUE self) { ruby_xml_node *rxn; Data_Get_Struct(self, ruby_xml_node, rxn); return(INT2NUM(xmlNodeGetSpacePreserve(rxn->node))); } /* * call-seq: * node.space_preserve = true|false * * Control whether this node preserves whitespace. */ VALUE ruby_xml_node_space_preserve_set(VALUE self, VALUE bool) { ruby_xml_node *rxn; Data_Get_Struct(self, ruby_xml_node, rxn); if (TYPE(bool) == T_FALSE) xmlNodeSetSpacePreserve(rxn->node, 1); else xmlNodeSetSpacePreserve(rxn->node, 0); return(Qnil); } /* * call-seq: * node.text? => (true|false) * * Determine whether this node has text. */ VALUE ruby_xml_node_text_q(VALUE self) { ruby_xml_node *rxn; Data_Get_Struct(self, ruby_xml_node, rxn); if (rxn->node == NULL) return(Qnil); return((xmlNodeIsText(rxn->node) == 1) ? Qtrue : Qfalse); } /* * call-seq: * node.to_s => "string" * * Coerce this node to a string representation of * it's XML. */ VALUE ruby_xml_node_to_s(VALUE self) { ruby_xml_node *rxn; xmlBufferPtr buf; VALUE result; Data_Get_Struct(self, ruby_xml_node, rxn); buf = xmlBufferCreate(); xmlNodeDump(buf, rxn->node->doc, rxn->node, 0, 1); result = rb_str_new2((const char*)buf->content); xmlBufferFree(buf); return result; } /* * call-seq: * node.type => num * * Obtain this node's type identifier. 
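 *
 * The value is libxml2's numeric node type; +node_type_name+ gives the
 * readable form. Sketch, for an element node:
 *
 *   node.node_type        # => 1  (libxml2's XML_ELEMENT_NODE)
 *   node.node_type_name   # => "element"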
*/ VALUE ruby_xml_node_type(VALUE self) { ruby_xml_node *rxn; Data_Get_Struct(self, ruby_xml_node, rxn); return(INT2NUM(rxn->node->type)); } /* * call-seq: * node.type_name => num * * Obtain this node's type name. */ VALUE ruby_xml_node_type_name(VALUE self) { ruby_xml_node *rxn; Data_Get_Struct(self, ruby_xml_node, rxn); switch(rxn->node->type) { case XML_ELEMENT_NODE: return(rb_str_new2("element")); case XML_ATTRIBUTE_NODE: return(rb_str_new2("attribute")); case XML_TEXT_NODE: return(rb_str_new2("text")); case XML_CDATA_SECTION_NODE: return(rb_str_new2("cdata")); case XML_ENTITY_REF_NODE: return(rb_str_new2("entity_ref")); case XML_ENTITY_NODE: return(rb_str_new2("entity")); case XML_PI_NODE: return(rb_str_new2("pi")); case XML_COMMENT_NODE: return(rb_str_new2("comment")); case XML_DOCUMENT_NODE: return(rb_str_new2("document_xml")); case XML_DOCUMENT_TYPE_NODE: return(rb_str_new2("doctype")); case XML_DOCUMENT_FRAG_NODE: return(rb_str_new2("fragment")); case XML_NOTATION_NODE: return(rb_str_new2("notation")); case XML_HTML_DOCUMENT_NODE: return(rb_str_new2("document_html")); case XML_DTD_NODE: return(rb_str_new2("dtd")); case XML_ELEMENT_DECL: return(rb_str_new2("elem_decl")); case XML_ATTRIBUTE_DECL: return(rb_str_new2("attribute_decl")); case XML_ENTITY_DECL: return(rb_str_new2("entity_decl")); case XML_NAMESPACE_DECL: return(rb_str_new2("namespace")); case XML_XINCLUDE_START: return(rb_str_new2("xinclude_start")); case XML_XINCLUDE_END: return(rb_str_new2("xinclude_end")); #ifdef LIBXML_DOCB_ENABLED case XML_DOCB_DOCUMENT_NODE: return(rb_str_new2("document_docbook")); #endif default: rb_raise(eXMLNodeUnknownType, "Unknown node type: %n", rxn->node->type); return(Qfalse); } } /* * call-seq: * node.xinclude_end? => num * * Determine whether this node is an xinclude end node. */ VALUE ruby_xml_node_xinclude_end_q(VALUE self) { ruby_xml_node *rxn; Data_Get_Struct(self, ruby_xml_node, rxn); if (rxn->node->type == XML_XINCLUDE_END) return(Qtrue); else return(Qfalse); } /* * call-seq: * node.xinclude_start? => num * * Determine whether this node is an xinclude start node. */ VALUE ruby_xml_node_xinclude_start_q(VALUE self) { ruby_xml_node *rxn; Data_Get_Struct(self, ruby_xml_node, rxn); if (rxn->node->type == XML_XINCLUDE_START) return(Qtrue); else return(Qfalse); } // TODO my gut tells me this is where our sigseg etc. problems start... /* * call-seq: * node.copy => node * * Create a copy of this node. */ VALUE ruby_xml_node_copy(VALUE self, VALUE deep) { /* MUFF */ ruby_xml_node *rxn, *n_rxn; VALUE n_node; Data_Get_Struct(self, ruby_xml_node, rxn); n_node = ruby_xml_node_new(cXMLNode, NULL); // class?? 
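  /* The actual copy happens below: xmlCopyNode's second argument requests a
     recursive (deep) copy of the children when +deep+ is anything other than
     nil or false. */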
Data_Get_Struct(n_node, ruby_xml_node, n_rxn); n_rxn->node = xmlCopyNode( rxn->node, ((deep==Qnil)||(deep==Qfalse))?0:1 ); if (rxn->node == NULL) return(Qnil); return n_node; } // Rdoc needs to know #ifdef RDOC_NEVER_DEFINED mXML = rb_define_module("XML"); #endif void ruby_init_xml_node(void) { cXMLNode = rb_define_class_under(mXML, "Node", rb_cObject); eXMLNodeSetNamespace = rb_define_class_under(cXMLNode, "SetNamespace", rb_eException); eXMLNodeFailedModify = rb_define_class_under(cXMLNode, "FailedModify", rb_eException); eXMLNodeUnknownType = rb_define_class_under(cXMLNode, "UnknownType", rb_eException); rb_define_const(cXMLNode, "SPACE_DEFAULT", INT2NUM(0)); rb_define_const(cXMLNode, "SPACE_PRESERVE", INT2NUM(1)); rb_define_const(cXMLNode, "SPACE_NOT_INHERIT", INT2NUM(-1)); rb_define_const(cXMLNode, "XLINK_ACTUATE_AUTO", INT2NUM(1)); rb_define_const(cXMLNode, "XLINK_ACTUATE_NONE", INT2NUM(0)); rb_define_const(cXMLNode, "XLINK_ACTUATE_ONREQUEST", INT2NUM(2)); rb_define_const(cXMLNode, "XLINK_SHOW_EMBED", INT2NUM(2)); rb_define_const(cXMLNode, "XLINK_SHOW_NEW", INT2NUM(1)); rb_define_const(cXMLNode, "XLINK_SHOW_NONE", INT2NUM(0)); rb_define_const(cXMLNode, "XLINK_SHOW_REPLACE", INT2NUM(3)); rb_define_const(cXMLNode, "XLINK_TYPE_EXTENDED", INT2NUM(2)); rb_define_const(cXMLNode, "XLINK_TYPE_EXTENDED_SET", INT2NUM(3)); rb_define_const(cXMLNode, "XLINK_TYPE_NONE", INT2NUM(0)); rb_define_const(cXMLNode, "XLINK_TYPE_SIMPLE", INT2NUM(1)); rb_define_singleton_method(cXMLNode, "new", ruby_xml_node_initialize, -1); rb_define_method(cXMLNode, "<<", ruby_xml_node_content_add, 1); rb_define_method(cXMLNode, "[]", ruby_xml_node_property_get, 1); rb_define_method(cXMLNode, "[]=", ruby_xml_node_property_set, 2); rb_define_method(cXMLNode, "attribute?", ruby_xml_node_attribute_q, 0); rb_define_method(cXMLNode, "attribute_decl?", ruby_xml_node_attribute_decl_q, 0); rb_define_method(cXMLNode, "base", ruby_xml_node_base_get, 0); rb_define_method(cXMLNode, "base=", ruby_xml_node_base_set, 1); rb_define_method(cXMLNode, "blank?", ruby_xml_node_empty_q, 0); rb_define_method(cXMLNode, "cdata?", ruby_xml_node_cdata_q, 0); rb_define_method(cXMLNode, "comment?", ruby_xml_node_comment_q, 0); rb_define_method(cXMLNode, "copy", ruby_xml_node_copy, 1); rb_define_method(cXMLNode, "child", ruby_xml_node_child_get, 0); rb_define_method(cXMLNode, "child?", ruby_xml_node_child_q, 0); rb_define_method(cXMLNode, "child=", ruby_xml_node_child_set, 1); rb_define_method(cXMLNode, "children", ruby_xml_node_child_get, 0); rb_define_method(cXMLNode, "children?", ruby_xml_node_child_q, 0); rb_define_method(cXMLNode, "content", ruby_xml_node_content_get, 0); rb_define_method(cXMLNode, "content=", ruby_xml_node_content_set, 1); rb_define_method(cXMLNode, "content_stripped", ruby_xml_node_content_stripped_get, 0); rb_define_method(cXMLNode, "doc", ruby_xml_node_doc, 0); rb_define_method(cXMLNode, "docbook_doc?", ruby_xml_node_docbook_doc_q, 0); rb_define_method(cXMLNode, "doctype?", ruby_xml_node_doctype_q, 0); rb_define_method(cXMLNode, "document?", ruby_xml_node_document_q, 0); rb_define_method(cXMLNode, "dtd?", ruby_xml_node_dtd_q, 0); rb_define_method(cXMLNode, "dump", ruby_xml_node_dump, 0); rb_define_method(cXMLNode, "debug_dump", ruby_xml_node_debug_dump, 0); rb_define_method(cXMLNode, "element?", ruby_xml_node_element_q, 0); rb_define_method(cXMLNode, "element_decl?", ruby_xml_node_element_decl_q, 0); rb_define_method(cXMLNode, "empty?", ruby_xml_node_empty_q, 0); rb_define_method(cXMLNode, "entity?", ruby_xml_node_entity_q, 
0); rb_define_method(cXMLNode, "entity_ref?", ruby_xml_node_entity_ref_q, 0); rb_define_method(cXMLNode, "eql?", ruby_xml_node_eql_q, 1); rb_define_method(cXMLNode, "find", ruby_xml_node_find, -1); rb_define_method(cXMLNode, "find_first", ruby_xml_node_find_first, -1); rb_define_method(cXMLNode, "fragment?", ruby_xml_node_fragment_q, 0); rb_define_method(cXMLNode, "hash", ruby_xml_node_hash, 0); rb_define_method(cXMLNode, "html_doc?", ruby_xml_node_html_doc_q, 0); rb_define_method(cXMLNode, "lang", ruby_xml_node_lang_get, 0); rb_define_method(cXMLNode, "lang=", ruby_xml_node_lang_set, 1); rb_define_method(cXMLNode, "last", ruby_xml_node_last_get, 0); rb_define_method(cXMLNode, "last?", ruby_xml_node_last_q, 0); rb_define_method(cXMLNode, "line_num", ruby_xml_node_line_num, 0); rb_define_method(cXMLNode, "name", ruby_xml_node_name_get, 0); rb_define_method(cXMLNode, "name=", ruby_xml_node_name_set, 1); rb_define_method(cXMLNode, "namespace", ruby_xml_node_namespace_get, 0); rb_define_method(cXMLNode, "namespace_node", ruby_xml_node_namespace_get_node, 0); rb_define_method(cXMLNode, "namespace?", ruby_xml_node_namespace_q, 0); rb_define_method(cXMLNode, "namespace=", ruby_xml_node_namespace_set, -1); rb_define_method(cXMLNode, "next", ruby_xml_node_next_get, 0); rb_define_method(cXMLNode, "next?", ruby_xml_node_next_q, 0); rb_define_method(cXMLNode, "node_type", ruby_xml_node_type, 0); rb_define_method(cXMLNode, "node_type_name", ruby_xml_node_type_name, 0); rb_define_method(cXMLNode, "notation?", ruby_xml_node_notation_q, 0); rb_define_method(cXMLNode, "ns", ruby_xml_node_namespace_get, 0); rb_define_method(cXMLNode, "ns?", ruby_xml_node_ns_q, 0); rb_define_method(cXMLNode, "ns_def", ruby_xml_node_ns_def_get, 0); rb_define_method(cXMLNode, "ns_def?", ruby_xml_node_ns_def_q, 0); rb_define_method(cXMLNode, "parent", ruby_xml_node_parent_get, 0); rb_define_method(cXMLNode, "parent?", ruby_xml_node_parent_q, 0); rb_define_method(cXMLNode, "path", ruby_xml_node_path, 0); rb_define_method(cXMLNode, "pi?", ruby_xml_node_pi_q, 0); rb_define_method(cXMLNode, "pointer", ruby_xml_node_pointer, 1); rb_define_method(cXMLNode, "prev", ruby_xml_node_prev_get, 0); rb_define_method(cXMLNode, "prev?", ruby_xml_node_prev_q, 0); rb_define_method(cXMLNode, "property", ruby_xml_node_property_get, 1); rb_define_method(cXMLNode, "properties", ruby_xml_node_properties_get, 0); rb_define_method(cXMLNode, "properties?", ruby_xml_node_properties_q, 0); rb_define_method(cXMLNode, "remove!", ruby_xml_node_remove_ex, 0); rb_define_method(cXMLNode, "search_ns", ruby_xml_node_search_ns, 1); rb_define_method(cXMLNode, "search_href", ruby_xml_node_search_href, 1); rb_define_method(cXMLNode, "sibling=", ruby_xml_node_sibling_set, 1); rb_define_method(cXMLNode, "space_preserve", ruby_xml_node_space_preserve_get, 0); rb_define_method(cXMLNode, "space_preserve=", ruby_xml_node_space_preserve_set, 1); rb_define_method(cXMLNode, "text?", ruby_xml_node_text_q, 0); rb_define_method(cXMLNode, "to_s", ruby_xml_node_to_s, 0); rb_define_method(cXMLNode, "xinclude_end?", ruby_xml_node_xinclude_end_q, 0); rb_define_method(cXMLNode, "xinclude_start?", ruby_xml_node_xinclude_start_q, 0); rb_define_method(cXMLNode, "xlink?", ruby_xml_node_xlink_q, 0); rb_define_method(cXMLNode, "xlink_type", ruby_xml_node_xlink_type, 0); rb_define_method(cXMLNode, "xlink_type_name", ruby_xml_node_xlink_type_name, 0); rb_define_alias(cXMLNode, "==", "eql?"); } 
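/*
 * Illustrative Ruby usage of the XML::Node methods registered above.
 * This is a sketch only: it assumes the xml/libxml extension has been
 * built and that +node+ is an XML::Node taken from a parsed document.
 *
 *   node.name                        # => the element's name
 *   node["id"] = "n1"                # set an attribute via []=
 *   node.child.name if node.child?   # first child's name, when present
 *   node.next if node.next?          # following sibling, if any
 *   node.path                        # XPath-style path to this node
 *   node.to_s                        # this node serialized as XML
 */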
ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_schema.c0000644000000000000000000000673411672453175026375 0ustar rootroot#include "libxml.h" #include "ruby_xml_schema.h" VALUE cXMLSchema; static void ruby_xml_schema_mark(ruby_xml_schema *rxschema) { return; } void ruby_xml_schema_free(ruby_xml_schema *rxschema) { if (rxschema->schema != NULL) { xmlSchemaFree(rxschema->schema); rxschema->schema = NULL; } free(rxschema); } /* * call-seq: * XML::Schema.new(schema_uri) => schema * * Create a new schema from the specified URI. */ VALUE ruby_xml_schema_init_from_uri(int argc, VALUE *argv, VALUE class) { VALUE uri; xmlSchemaParserCtxtPtr parser; xmlSchemaPtr sptr; switch (argc) { case 1: rb_scan_args(argc, argv, "10", &uri); Check_Type(uri, T_STRING); parser = xmlSchemaNewParserCtxt(StringValuePtr(uri)); sptr = xmlSchemaParse(parser); xmlSchemaFreeParserCtxt(parser); break; default: rb_raise(rb_eArgError, "wrong number of arguments (need 1)"); } return Qnil; } /* * call-seq: * XML::Schema.from_string("schema_data") => "value" * * Create a new schema using the specified string. */ VALUE ruby_xml_schema_init_from_str(int argc, VALUE *argv, VALUE class) { VALUE schema_str; xmlSchemaParserCtxtPtr parser; //xmlSchemaPtr sptr; ruby_xml_schema *rxschema; switch (argc) { case 1: rb_scan_args(argc, argv, "10", &schema_str); Check_Type(schema_str, T_STRING); parser = xmlSchemaNewMemParserCtxt(StringValuePtr(schema_str), strlen(StringValuePtr(schema_str))); rxschema = ALLOC(ruby_xml_schema); rxschema->schema = xmlSchemaParse(parser); xmlSchemaFreeParserCtxt(parser); return( Data_Wrap_Struct(cXMLSchema, ruby_xml_schema_mark, ruby_xml_schema_free, rxschema) ); default: rb_raise(rb_eArgError, "wrong number of arguments (need 1)"); } return Qnil; } /* TODO what is this patch doing here? 
xmlSchemaParserCtxtPtr parser; xmlSchemaPtr sptr; xmlSchemaValidCtxtPtr vptr; + int is_invalid; if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, "z", &source) == FAILURE) { return; @@ -598,26 +598,24 @@ convert_to_string_ex(&source); parser = xmlSchemaNewParserCtxt(Z_STRVAL_P(source)); sptr = xmlSchemaParse(parser); break; case SCHEMA_BLOB: convert_to_string_ex(&source); parser = xmlSchemaNewMemParserCtxt(Z_STRVAL_P(source), Z_STRLEN_P(source)); sptr = xmlSchemaParse(parser); break; } vptr = xmlSchemaNewValidCtxt(sptr); + is_invalid = xmlSchemaValidateDoc(vptr, (xmlDocPtr) sxe->document->ptr); xmlSchemaFree(sptr); xmlSchemaFreeValidCtxt(vptr); xmlSchemaFreeParserCtxt(parser); - if (is_valid) { - RETURN_TRUE; - } else { + if (is_invalid) { RETURN_FALSE; + } else { + RETURN_TRUE; } } }}} @@ -695,7 +693,7 @@ { if (!strcmp(method, "xsearch")) { simplexml_ce_xpath_search(INTERNAL_FUNCTION_PARAM_PASSTHRU); -#ifdef xmlSchemaParserCtxtPtr +#ifdef LIBXML_SCHEMAS_ENABLED } else if (!strcmp(method, "validate_schema_file")) { simplexml_ce_schema_validate(INTERNAL_FUNCTION_PARAM_PASSTHRU, SCHEMA_FILE); } else if (!strcmp(method, "validate_schema_buffer")) { */ void ruby_schema_free(ruby_xml_schema *rxs) { } // Rdoc needs to know #ifdef RDOC_NEVER_DEFINED mXML = rb_define_module("XML"); #endif void ruby_init_xml_schema(void) { cXMLSchema = rb_define_class_under(mXML, "Schema", rb_cObject); rb_define_singleton_method(cXMLSchema, "new", ruby_xml_schema_init_from_uri, -1); rb_define_singleton_method(cXMLSchema, "from_string", ruby_xml_schema_init_from_str, -1); } ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_dtd.h0000644000000000000000000000101311672453175025676 0ustar rootroot#ifndef __RUBY_XML_DTD__ #define __RUBY_XML_DTD__ extern VALUE cXMLDtd; typedef struct rxp_dtd { xmlDtdPtr dtd; /* DTD interface */ //int data_type; /* The data type referenced by *data */ //void *data; /* Pointer to an external structure of options */ //int is_ptr; /* Determines if this object owns its data or points to it someplace else */ //VALUE xmlver; /* T_STRING with the xml version */ } ruby_xml_dtd; void ruby_init_xml_dtd(void); void ruby_dtd_free(ruby_xml_dtd *rxdtd); #endif ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_schema.h0000644000000000000000000000052011672453175026365 0ustar rootroot#ifndef __RUBY_XML_SCHEMA__ #define __RUBY_XML_SCHEMA__ #include #include extern VALUE cXMLSchema; typedef struct rxp_schema { xmlSchemaPtr schema; /* Schema interface */ } ruby_xml_schema; void ruby_init_xml_schema(void); void ruby_schema_free(ruby_xml_schema *rxs); #endif ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_xpointer_context.h0000644000000000000000000000074011672453175030545 0ustar rootroot/* $Id: ruby_xml_xpointer_context.h,v 1.1 2006/02/21 20:40:16 roscopeco Exp $ */ /* Please see the LICENSE file for copyright and distribution information */ #ifndef __RUBY_XML_XPOINTER_CONTEXT__ #define __RUBY_XML_XPOINTER_CONTEXT__ extern VALUE cXMLXPointerContext; extern VALUE eXMLXPointerContextInvalidPath; typedef struct ruby_xml_xpointer_context { VALUE xd; xmlXPathContextPtr ctxt; } ruby_xml_xpointer_context; void ruby_init_xml_xpointer_context(void); #endif ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_node_set.c0000644000000000000000000001353511672453175026732 0ustar rootroot/* $Id: ruby_xml_node_set.c,v 1.3 2006/04/14 14:45:25 roscopeco Exp $ */ /* Please see the LICENSE file for copyright and distribution 
information */ #include "libxml.h" #include "ruby_xml_node_set.h" /* * Document-class: XML::Node::Set * * Includes Enumerable. */ VALUE cXMLNodeSet; // TODO maybe we should support [] on nodeset? // Would also give us on xpath too... /* * call-seq: * nodeset.to_a => [node, ..., node] * * Obtain an array of the nodes in this set. */ VALUE ruby_xml_node_set_to_a(VALUE self) { int i; ruby_xml_node_set *rxnset; VALUE nodeobj, set_ary; Data_Get_Struct(self, ruby_xml_node_set, rxnset); set_ary = rb_ary_new(); if (!((rxnset->node_set == NULL) || (rxnset->node_set->nodeNr == 0))) { for (i = 0; i < rxnset->node_set->nodeNr; i++) { nodeobj = ruby_xml_node_new2(cXMLNode, rxnset->xd, rxnset->node_set->nodeTab[i]); rb_ary_push(set_ary, nodeobj); } } return(set_ary); } /* * call-seq: * nodeset.each { |node| ... } => self * * Call the supplied block for each node in this set. */ VALUE ruby_xml_node_set_each(VALUE self) { int i; ruby_xml_node_set *rxnset; VALUE nodeobj; Data_Get_Struct(self, ruby_xml_node_set, rxnset); if (rxnset->node_set == NULL) return(Qnil); for (i = 0; i < rxnset->node_set->nodeNr; i++) { switch(rxnset->node_set->nodeTab[i]->type) { case XML_ATTRIBUTE_NODE: nodeobj = ruby_xml_attr_new2(cXMLAttr, rxnset->xd, (xmlAttrPtr)rxnset->node_set->nodeTab[i]); break; default: nodeobj = ruby_xml_node_new2(cXMLNode, rxnset->xd, rxnset->node_set->nodeTab[i]); } rb_yield(nodeobj); } return(self); } /* * call-seq: * nodeset.empty? => (true|false) * * Determine whether this nodeset is empty (contains no nodes). */ VALUE ruby_xml_node_set_empty_q(VALUE self) { ruby_xml_node_set *rxnset; Data_Get_Struct(self, ruby_xml_node_set, rxnset); return ( rxnset->node_set == NULL || rxnset->node_set->nodeNr <= 0 ) ? Qtrue : Qfalse; } /* * call-seq: * nodeset.first => node * * Returns the first node in this node set, or nil if none exist. */ VALUE ruby_xml_node_set_first(VALUE self) { ruby_xml_node_set *rxnset; VALUE nodeobj; Data_Get_Struct(self, ruby_xml_node_set, rxnset); if (rxnset->node_set == NULL || rxnset->node_set->nodeNr < 1) return(Qnil); switch(rxnset->node_set->nodeTab[0]->type) { case XML_ATTRIBUTE_NODE: nodeobj = ruby_xml_attr_new2(cXMLAttr, rxnset->xd, (xmlAttrPtr)rxnset->node_set->nodeTab[0]); break; default: nodeobj = ruby_xml_node_new2(cXMLNode, rxnset->xd, rxnset->node_set->nodeTab[0]); } return(nodeobj); } void ruby_xml_node_set_free(ruby_xml_node_set *rxnset) { void *data; switch(rxnset->data_type) { case RUBY_LIBXML_SRC_TYPE_NULL: break; case RUBY_LIBXML_SRC_TYPE_XPATH: data = (void*)(rx_xpath_data *)rxnset->data; free((rx_xpath_data *)data); default: rb_fatal("Unknown data type, %d", rxnset->data_type); } /* Don't need to free the node set because the nodeset is a child of the XPath object that created the set. if (rxnset->node_set != NULL) xmlXPathFreeNodeSet(rxnset->node_set); */ free(rxnset); } /* * call-seq: * nodeset.length => num * * Obtain the length of this nodeset. 
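 *
 * Sketch (assumes +nodeset+ was produced by an XPath query):
 *
 *   nodeset.length   # => number of nodes in the set, or nil if there is
 *                    #    no underlying node set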
*/ VALUE ruby_xml_node_set_length(VALUE self) { ruby_xml_node_set *rxnset; Data_Get_Struct(self, ruby_xml_node_set, rxnset); if (rxnset->node_set == NULL) return(Qnil); else return(INT2NUM(rxnset->node_set->nodeNr)); } static void ruby_xml_node_set_mark(ruby_xml_node_set *rxnset) { if (rxnset == NULL) return; if (!NIL_P(rxnset->xd)) rb_gc_mark(rxnset->xd); if (!NIL_P(rxnset->xpath)) rb_gc_mark(rxnset->xpath); } VALUE ruby_xml_node_set_new(VALUE class, VALUE xd, VALUE xpath, xmlNodeSetPtr node_set) { ruby_xml_node_set *rxnset; rxnset = ALLOC(ruby_xml_node_set); rxnset->node_set = node_set; rxnset->data = NULL; rxnset->data_type = RUBY_LIBXML_SRC_TYPE_NULL; rxnset->xd = xd; rxnset->xpath = xpath; return(Data_Wrap_Struct(class, ruby_xml_node_set_mark, ruby_xml_node_set_free, rxnset)); } VALUE ruby_xml_node_set_new2(VALUE xd, VALUE xpath, xmlNodeSetPtr node_set) { return(ruby_xml_node_set_new(cXMLNodeSet, xd, xpath, node_set)); } /* * call-seq: * nodeset.xpath => xpath * * Obtain the xpath corresponding to this nodeset, if any. */ VALUE ruby_xml_node_set_xpath_get(VALUE self) { ruby_xml_node_set *rxnset; Data_Get_Struct(self, ruby_xml_node_set, rxnset); if (NIL_P(rxnset->xpath)) return(Qnil); else return(rxnset->xpath); } /* * call-seq: * nodeset.xpath_ctxt => context * * Return the xpath context corresponding to this nodeset, * if any. */ VALUE ruby_xml_node_set_xpath_data_get(VALUE self) { ruby_xml_node_set *rxnset; rx_xpath_data *rxxpd; Data_Get_Struct(self, ruby_xml_node_set, rxnset); if (rxnset->data_type != RUBY_LIBXML_SRC_TYPE_XPATH) return(Qnil); rxxpd = (rx_xpath_data *)rxnset->data; return(rxxpd->ctxt); } // Rdoc needs to know #ifdef RDOC_NEVER_DEFINED mXML = rb_define_module("XML"); cXMLNode = rb_define_class_under(mXML, "Node", rb_cObject); #endif void ruby_init_xml_node_set(void) { cXMLNodeSet = rb_define_class_under(cXMLNode, "Set", rb_cObject); rb_include_module(cXMLNodeSet, rb_const_get(rb_cObject, rb_intern("Enumerable"))); rb_define_method(cXMLNodeSet, "each", ruby_xml_node_set_each, 0); rb_define_method(cXMLNodeSet, "empty?", ruby_xml_node_set_empty_q, 0); rb_define_method(cXMLNodeSet, "first", ruby_xml_node_set_first, 0); rb_define_method(cXMLNodeSet, "length", ruby_xml_node_set_length, 0); rb_define_method(cXMLNodeSet, "to_a", ruby_xml_node_set_to_a, 0); rb_define_method(cXMLNodeSet, "xpath", ruby_xml_node_set_xpath_get, 0); rb_define_method(cXMLNodeSet, "xpath_ctxt", ruby_xml_node_set_xpath_data_get, 0); } ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_xpointer.c0000644000000000000000000000551711672453175027003 0ustar rootroot/* $Id: ruby_xml_xpointer.c,v 1.1 2006/02/21 20:40:16 roscopeco Exp $ */ /* Please see the LICENSE file for copyright and distribution information */ #include "libxml.h" #include "ruby_xml_xpointer.h" VALUE cXMLXPointer; VALUE eXMLXPointerInvalidExpression; VALUE ruby_xml_xpointer_point(VALUE class, VALUE rnode, VALUE xptr_str) { #ifdef LIBXML_XPTR_ENABLED ruby_xml_node *node; ruby_xml_xpath_context *xxpc; VALUE rxptr_xpth_ctxt, rxxp; xmlXPathObjectPtr xpath; Check_Type(xptr_str, T_STRING); if (rb_obj_is_kind_of(rnode, cXMLNode) == Qfalse) rb_raise(rb_eTypeError, "require an XML::Node object"); Data_Get_Struct(rnode, ruby_xml_node, node); rxptr_xpth_ctxt = ruby_xml_xpath_context_new(cXMLXPathContext, node->xd, xmlXPtrNewContext(node->node->doc, node->node, NULL)); if (NIL_P(rxptr_xpth_ctxt)) return(Qnil); Data_Get_Struct(rxptr_xpth_ctxt, ruby_xml_xpath_context, xxpc); xpath = xmlXPtrEval((xmlChar*)StringValuePtr(xptr_str), 
xxpc->ctxt); if (xpath == NULL) rb_raise(eXMLXPointerInvalidExpression, "invalid xpointer expression"); rxxp = ruby_xml_xpath_new(cXMLXPath, node->xd, rxptr_xpth_ctxt, xpath); return(rxxp); #else rb_warn("libxml was compiled without XPointer support"); return(Qfalse); #endif } VALUE ruby_xml_xpointer_point2(VALUE node, VALUE xptr_str) { return(ruby_xml_xpointer_point(cXMLXPointer, node, xptr_str)); } /* * call-seq: * XML::XPointer.range(start_node, end_node) => xpath * * Create an xpath representing the range between the supplied * start and end node. */ VALUE ruby_xml_xpointer_range(VALUE class, VALUE rstart, VALUE rend) { #ifdef LIBXML_XPTR_ENABLED ruby_xml_node *start, *end; VALUE rxxp; xmlXPathObjectPtr xpath; if (rb_obj_is_kind_of(rstart, cXMLNode) == Qfalse) rb_raise(rb_eTypeError, "require an XML::Node object as a starting point"); if (rb_obj_is_kind_of(rend, cXMLNode) == Qfalse) rb_raise(rb_eTypeError, "require an XML::Node object as an ending point"); Data_Get_Struct(rstart, ruby_xml_node, start); if (start->node == NULL) return(Qnil); Data_Get_Struct(rend, ruby_xml_node, end); if (end->node == NULL) return(Qnil); xpath = xmlXPtrNewRangeNodes(start->node, end->node); if (xpath == NULL) rb_fatal("You shouldn't be able to have this happen"); rxxp = ruby_xml_xpath_new(cXMLXPath, start->xd, Qnil, xpath); return(rxxp); #else rb_warn("libxml was compiled without XPointer support"); return(Qfalse); #endif } // Rdoc needs to know #ifdef RDOC_NEVER_DEFINED mXML = rb_define_module("XML"); #endif void ruby_init_xml_xpointer(void) { cXMLXPointer = rb_define_class_under(mXML, "XPointer", rb_cObject); eXMLXPointerInvalidExpression = rb_define_class_under(cXMLXPointer, "InvalidExpression", rb_eException); rb_define_singleton_method(cXMLXPointer, "range", ruby_xml_xpointer_range, 2); } ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_ns.h0000644000000000000000000000103311672453175025545 0ustar rootroot/* $Id: ruby_xml_ns.h,v 1.1 2006/02/21 20:40:16 roscopeco Exp $ */ /* Please see the LICENSE file for copyright and distribution information */ #ifndef __RUBY_XML_NS__ #define __RUBY_XML_NS__ extern VALUE cXMLNS; typedef struct ruby_xml_ns { xmlNsPtr ns; int is_ptr; VALUE xd; } ruby_xml_ns; void ruby_xml_ns_free(ruby_xml_ns *rxn); void ruby_init_xml_ns(void); VALUE ruby_xml_ns_new(VALUE class, VALUE xd, xmlNsPtr ns); VALUE ruby_xml_ns_new2(VALUE class, VALUE xd, xmlNsPtr ns); VALUE ruby_xml_ns_name_get(VALUE self); #endif ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_attribute.c0000644000000000000000000001303411672453175027127 0ustar rootroot/* $Id: ruby_xml_attribute.c,v 1.1 2006/02/21 20:40:16 roscopeco Exp $ */ /* Please see the LICENSE file for copyright and distribution information */ #include "libxml.h" #include "ruby_xml_attribute.h" VALUE cXMLAttribute; // TODO Wtf is this about? It's not referenced outside this file AFAIK... 
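/* Note: this file wraps libxml2's xmlAttributePtr, i.e. an attribute
 * *declaration* as found in a DTD, not an attribute instance on an element;
 * instances are handled by the XML::Attr wrapper (xmlAttrPtr) instead. */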
VALUE ruby_xml_attribute_child_get(VALUE self) { ruby_xml_attribute *rxa; Data_Get_Struct(self, ruby_xml_attribute, rxa); if (rxa->attribute->children == NULL) return(Qnil); else return(ruby_xml_node_new2(cXMLNode, rxa->xd, rxa->attribute->children)); } VALUE ruby_xml_attribute_children_q(VALUE self) { ruby_xml_attribute *rxa; Data_Get_Struct(self, ruby_xml_attribute, rxa); if (rxa->attribute->children == NULL) return(Qfalse); else return(Qtrue); } VALUE ruby_xml_attribute_default_get(VALUE self) { ruby_xml_attribute *rxa; Data_Get_Struct(self, ruby_xml_attribute, rxa); if (rxa->attribute->defaultValue == NULL) return(Qnil); else return(rb_str_new2((const char*)rxa->attribute->defaultValue)); } VALUE ruby_xml_attribute_element_get(VALUE self) { ruby_xml_attribute *rxa; Data_Get_Struct(self, ruby_xml_attribute, rxa); if (rxa->attribute->elem == NULL) return(Qnil); else return(rb_str_new2((const char*)rxa->attribute->elem)); } void ruby_xml_attribute_free(ruby_xml_attribute *rxa) { if (rxa->attribute != NULL && !rxa->is_ptr) { xmlUnlinkNode((xmlNodePtr)rxa->attribute); xmlFreeNode((xmlNodePtr)rxa->attribute); rxa->attribute = NULL; } free(rxa); } VALUE ruby_xml_attribute_last_get(VALUE self) { ruby_xml_attribute *rxa; Data_Get_Struct(self, ruby_xml_attribute, rxa); if (rxa->attribute->last == NULL) return(Qnil); else return(ruby_xml_node_new2(cXMLNode, rxa->xd, rxa->attribute->last)); } VALUE ruby_xml_attribute_last_q(VALUE self) { ruby_xml_attribute *rxa; Data_Get_Struct(self, ruby_xml_attribute, rxa); if (rxa->attribute->last == NULL) return(Qfalse); else return(Qtrue); } static void ruby_xml_attribute_mark(ruby_xml_attribute *rxa) { if (rxa == NULL) return; if (!NIL_P(rxa->xd)) rb_gc_mark(rxa->xd); } VALUE ruby_xml_attribute_name_get(VALUE self) { ruby_xml_attribute *rxa; Data_Get_Struct(self, ruby_xml_attribute, rxa); if (rxa->attribute->name == NULL) return(Qnil); else return(rb_str_new2((const char*)rxa->attribute->name)); } VALUE ruby_xml_attribute_new(VALUE class, VALUE xd, xmlAttributePtr attribute) { ruby_xml_attribute *rxa; rxa = ALLOC(ruby_xml_attribute); rxa->is_ptr = 0; rxa->attribute = attribute; if (NIL_P(xd)) rxa->xd = Qnil; else rxa->xd = xd; return(Data_Wrap_Struct(class, ruby_xml_attribute_mark, ruby_xml_attribute_free, rxa)); } VALUE ruby_xml_attribute_new2(VALUE class, VALUE xd, xmlAttributePtr attribute) { ruby_xml_attribute *rxa; rxa = ALLOC(ruby_xml_attribute); rxa->is_ptr = 1; rxa->attribute = attribute; if (NIL_P(xd)) rxa->xd = Qnil; else rxa->xd = xd; return(Data_Wrap_Struct(class, ruby_xml_attribute_mark, ruby_xml_attribute_free, rxa)); } VALUE ruby_xml_attribute_next_get(VALUE self) { ruby_xml_attribute *rxa; Data_Get_Struct(self, ruby_xml_attribute, rxa); if (rxa->attribute->next == NULL) return(Qnil); else return(ruby_xml_node_new2(cXMLNode, rxa->xd, rxa->attribute->next)); } VALUE ruby_xml_attribute_next_q(VALUE self) { ruby_xml_attribute *rxa; Data_Get_Struct(self, ruby_xml_attribute, rxa); if (rxa->attribute->next == NULL) return(Qfalse); else return(Qtrue); } VALUE ruby_xml_attribute_node_type_name(VALUE self) { return(rb_str_new2("attribute")); } VALUE ruby_xml_attribute_prefix_get(VALUE self) { ruby_xml_attribute *rxa; Data_Get_Struct(self, ruby_xml_attribute, rxa); if (rxa->attribute->prefix == NULL) return(Qnil); else return(rb_str_new2((const char*)rxa->attribute->prefix)); } VALUE ruby_xml_attribute_prev_get(VALUE self) { ruby_xml_attribute *rxa; Data_Get_Struct(self, ruby_xml_attribute, rxa); if (rxa->attribute->prev == NULL) return(Qnil); else 
return(ruby_xml_node_new2(cXMLNode, rxa->xd, rxa->attribute->prev)); } VALUE ruby_xml_attribute_prev_q(VALUE self) { ruby_xml_attribute *rxa; Data_Get_Struct(self, ruby_xml_attribute, rxa); if (rxa->attribute->prev == NULL) return(Qfalse); else return(Qtrue); } // Rdoc maybe doesn't need to know // #ifdef RDOC_NEVER_DEFINED // mXML = rb_define_module("XML"); // #endif void ruby_init_xml_attribute(void) { cXMLAttribute = rb_define_class_under(mXML, "Attribute", rb_cObject); rb_define_method(cXMLAttribute, "child", ruby_xml_attribute_child_get, 0); rb_define_method(cXMLAttribute, "children?", ruby_xml_attribute_children_q, 0); rb_define_method(cXMLAttribute, "default", ruby_xml_attribute_default_get, 0); rb_define_method(cXMLAttribute, "element", ruby_xml_attribute_element_get, 0); rb_define_method(cXMLAttribute, "last", ruby_xml_attribute_last_get, 0); rb_define_method(cXMLAttribute, "last?", ruby_xml_attribute_last_q, 0); rb_define_method(cXMLAttribute, "node_type_name", ruby_xml_attribute_node_type_name, 0); rb_define_method(cXMLAttribute, "name", ruby_xml_attribute_name_get, 0); rb_define_method(cXMLAttribute, "next", ruby_xml_attribute_next_get, 0); rb_define_method(cXMLAttribute, "next?", ruby_xml_attribute_next_q, 0); rb_define_method(cXMLAttribute, "prefix", ruby_xml_attribute_prefix_get, 0); rb_define_method(cXMLAttribute, "prev", ruby_xml_attribute_prev_get, 0); rb_define_method(cXMLAttribute, "prev?", ruby_xml_attribute_prev_q, 0); } ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_attribute.h0000644000000000000000000000123111672453175027130 0ustar rootroot/* $Id: ruby_xml_attribute.h,v 1.1 2006/02/21 20:40:16 roscopeco Exp $ */ /* Please see the LICENSE file for copyright and distribution information */ #ifndef __RUBY_XML_ATTRIBUTE__ #define __RUBY_XML_ATTRIBUTE__ extern VALUE cXMLAttribute; typedef struct ruby_xml_attribute { xmlAttributePtr attribute; VALUE xd; int is_ptr; } ruby_xml_attribute; void ruby_xml_attribute_free(ruby_xml_attribute *rxa); void ruby_init_xml_attribute(void); VALUE ruby_xml_attribute_new(VALUE class, VALUE xd, xmlAttributePtr attribute); VALUE ruby_xml_attribute_new2(VALUE class, VALUE xd, xmlAttributePtr attribute); VALUE ruby_xml_attribute_name_get(VALUE self); #endif ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_sax_parser.c0000644000000000000000000002626211672453175027302 0ustar rootroot/* $Id: ruby_xml_sax_parser.c,v 1.4 2006/04/14 23:46:06 roscopeco Exp $ */ /* Please see the LICENSE file for copyright and distribution information */ #include "libxml.h" #include "ruby_xml_sax_parser.h" VALUE cXMLSaxParser; VALUE callsym; #include "sax_parser_callbacks.inc" void ruby_xml_sax_parser_free(ruby_xml_sax_parser *rxsp) { /* Apparently this isn't needed: time will tell */ /* if (rxsp->xsh != NULL) */ /* xmlFreeSax_Parser(rxsp->sax_parser); */ } #define mark_handler(rxsp, handler) \ if (rxsp->cbp->handler && (rxsp->cbp->handler != Qnil)) \ rb_gc_mark(rxsp->cbp->handler) void ruby_xml_sax_parser_mark(ruby_xml_sax_parser *rxsp) { mark_handler(rxsp, internalSubset); mark_handler(rxsp, isStandalone); mark_handler(rxsp, hasInternalSubset); mark_handler(rxsp, hasExternalSubset); mark_handler(rxsp, startDocument); mark_handler(rxsp, endDocument); mark_handler(rxsp, startElement); mark_handler(rxsp, endElement); mark_handler(rxsp, reference); mark_handler(rxsp, characters); mark_handler(rxsp, processingInstruction); mark_handler(rxsp, comment); mark_handler(rxsp, xmlParserWarning); mark_handler(rxsp, 
xmlParserError); mark_handler(rxsp, xmlParserFatalError); mark_handler(rxsp, cdataBlock); } /* * call-seq: * XML::SaxParser.new => sax_parser * * Create a new XML::SaxParser instance. */ VALUE ruby_xml_sax_parser_new(VALUE class) { ruby_xml_sax_parser *rxsp; rxsp = ALLOC(ruby_xml_sax_parser); rxsp->cbp = ALLOC(ruby_xml_sax_parser_callbacks); memset(rxsp->cbp, 0, sizeof(ruby_xml_sax_parser_callbacks)); rxsp->xsh = &rubySAXHandlerStruct; rxsp->xpc = NULL; rxsp->filename = Qnil; rxsp->str = Qnil; return(Data_Wrap_Struct(class, ruby_xml_sax_parser_mark, ruby_xml_sax_parser_free, rxsp)); } /* * call-seq: * sax_parser.filename => "filename" * * Obtain the filename this parser reads from. */ VALUE ruby_xml_sax_parser_filename_get(VALUE self) { ruby_xml_sax_parser *rxsp; Data_Get_Struct(self, ruby_xml_sax_parser, rxsp); return(rxsp->filename); } /* * call-seq: * sax_parser.filename = "filename" * * Set the filename this parser reads from. */ VALUE ruby_xml_sax_parser_filename_set(VALUE self, VALUE filename) { ruby_xml_sax_parser *rxsp; Check_Type(filename, T_STRING); Data_Get_Struct(self, ruby_xml_sax_parser, rxsp); rxsp->filename = filename; return(rxsp->filename); } #define set_handler(self, argc, argv, handler) \ VALUE proc; \ rb_scan_args(argc, argv, "0&", &proc); \ ruby_xml_sax_parser *rxsp; \ Data_Get_Struct(self, ruby_xml_sax_parser, rxsp); \ rxsp->cbp->handler = proc; \ return(Qnil); /* * call-seq: * parser.on_internal_subset { |name, external_id, system_id| ... } => nil * * Set the callback block for an internal subset event. */ VALUE ruby_xml_sax_parser_on_internal_subset(int argc, VALUE *argv, VALUE self) { set_handler(self, argc, argv, internalSubset); } /* * call-seq: * parser.on_is_standalone { || ... } => nil * * Set the callback proc for 'is standalone' event. */ VALUE ruby_xml_sax_parser_on_is_standalone(int argc, VALUE *argv, VALUE self) { set_handler(self, argc, argv, isStandalone); } /* * call-seq: * parser.on_has_internal_subset { || ... } => nil * * Set the callback proc for an internal subset notification event. */ VALUE ruby_xml_sax_parser_on_has_internal_subset(int argc, VALUE *argv, VALUE self) { set_handler(self, argc, argv, hasInternalSubset); } /* * call-seq: * parser.on_has_external_subset { || ... } => nil * * Set the callback proc for an external subset notification event. */ VALUE ruby_xml_sax_parser_on_has_external_subset(int argc, VALUE *argv, VALUE self) { set_handler(self, argc, argv, hasExternalSubset); } /* * call-seq: * parser.on_start_document { || ... } => nil * * Set the callback proc for a start document event. */ VALUE ruby_xml_sax_parser_on_start_document(int argc, VALUE *argv, VALUE self) { set_handler(self, argc, argv, startDocument); } /* * call-seq: * parser.on_end_document { || ... } => nil * * Set the callback proc for an end document event. */ VALUE ruby_xml_sax_parser_on_end_document(int argc, VALUE *argv, VALUE self) { set_handler(self, argc, argv, endDocument); } /* * call-seq: * parser.on_start_element { |name, attr_hash| ... } => nil * * Set the callback proc for an element start event. */ VALUE ruby_xml_sax_parser_on_start_element(int argc, VALUE *argv, VALUE self) { set_handler(self, argc, argv, startElement); } /* * call-seq: * parser.on_end_element { |name| ... } => nil * * Set the callback proc for an element end event. */ VALUE ruby_xml_sax_parser_on_end_element(int argc, VALUE *argv, VALUE self) { set_handler(self, argc, argv, endElement); } /* * call-seq: * parser.on_reference { |name| ... 
} => nil * * Set the callback proc for a reference event. */ VALUE ruby_xml_sax_parser_on_reference(int argc, VALUE *argv, VALUE self) { set_handler(self, argc, argv, reference); } /* * call-seq: * parser.on_characters { |chars| ... } => nil * * Set the callback proc for a characters event. */ VALUE ruby_xml_sax_parser_on_characters(int argc, VALUE *argv, VALUE self) { set_handler(self, argc, argv, characters); } /* * call-seq: * parser.on_processing_instruction { |target, data| ... } => nil * * Set the callback proc for an processing instruction event. */ VALUE ruby_xml_sax_parser_on_processing_instruction(int argc, VALUE *argv, VALUE self) { set_handler(self, argc, argv, processingInstruction); } /* * call-seq: * parser.on_comment { |msg| ... } => nil * * Set the callback proc for a comment event. */ VALUE ruby_xml_sax_parser_on_comment(int argc, VALUE *argv, VALUE self) { set_handler(self, argc, argv, comment); } /* * call-seq: * parser.on_parser_warning { |msg| ... } => nil * * Set the callback proc that receives parser warnings. */ VALUE ruby_xml_sax_parser_on_parser_warning(int argc, VALUE *argv, VALUE self) { set_handler(self, argc, argv, xmlParserWarning); } /* * call-seq: * parser.on_parser_error { |msg| ... } => nil * * Set the callback proc that receives parser errors. */ VALUE ruby_xml_sax_parser_on_parser_error(int argc, VALUE *argv, VALUE self) { set_handler(self, argc, argv, xmlParserError); } /* * call-seq: * parser.on_parser_fatal_error { |msg| ... } => nil * * Set the callback proc that receives fatal parser errors. */ VALUE ruby_xml_sax_parser_on_parser_fatal_error(int argc, VALUE *argv, VALUE self) { set_handler(self, argc, argv, xmlParserFatalError); } /* * call-seq: * parser.on_cdata_block { |cdata| ... } => nil * * Set the callback proc for a CDATA block event. */ VALUE ruby_xml_sax_parser_on_cdata_block(int argc, VALUE *argv, VALUE self) { set_handler(self, argc, argv, cdataBlock); } /* * call-seq: * parser.on_external_subset { |name, external_id, system_id| ... } => nil * * Set the callback proc for an external subset event. */ VALUE ruby_xml_sax_parser_on_external_subset(int argc, VALUE *argv, VALUE self) { set_handler(self, argc, argv, externalSubset); } /* * call-seq: * parser.parse => (true|false) * * Parse the input XML, generating callbacks to the procs * registered with the parser (via the on_xxxx attributes). */ VALUE ruby_xml_sax_parser_parse(VALUE self) { char *str; int status = 1; ruby_xml_sax_parser *rxsp; Data_Get_Struct(self, ruby_xml_sax_parser, rxsp); if (rxsp->filename != Qnil) { status = xmlSAXUserParseFile(rxsp->xsh, rxsp->cbp, StringValuePtr(rxsp->filename)); } else if (rxsp->str != Qnil) { str = StringValuePtr(rxsp->str); status = //ruby_xml_document_new(cXMLDocument, xmlSAXUserParseMemory(rxsp->xsh, rxsp->cbp, str, strlen(str)); //); } /* XXX This should return an exception for the various error codes * that can come back in status, but I'm too lazy to do that right * now. */ if (status) return(Qfalse); else return(Qtrue); } /* * call-seq: * parser.string => "xml" * * Obtain the parser's input string. */ VALUE ruby_xml_sax_parser_str_get(VALUE self) { ruby_xml_sax_parser *rxsp; Data_Get_Struct(self, ruby_xml_sax_parser, rxsp); return(rxsp->str); } /* * call-seq: * parser.string = "xml" * * Set the parser's input string. 
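 *
 * A minimal parse sketch (the document string and the handler block are
 * illustrative):
 *
 *   parser = XML::SaxParser.new
 *   parser.string = "<doc><item/></doc>"
 *   parser.on_start_element { |name, attrs| puts name }
 *   parser.parse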
*/ VALUE ruby_xml_sax_parser_str_set(VALUE self, VALUE str) { ruby_xml_sax_parser *rxsp; Check_Type(str, T_STRING); Data_Get_Struct(self, ruby_xml_sax_parser, rxsp); rxsp->str = str; return(rxsp->str); } // Rdoc needs to know #ifdef RDOC_NEVER_DEFINED mXML = rb_define_module("XML"); #endif void ruby_init_xml_sax_parser(void) { cXMLSaxParser = rb_define_class_under(mXML, "SaxParser", rb_cObject); callsym = rb_intern("call"); rb_define_singleton_method(cXMLSaxParser, "new", ruby_xml_sax_parser_new, 0); rb_define_method(cXMLSaxParser, "filename", ruby_xml_sax_parser_filename_get, 0); rb_define_method(cXMLSaxParser, "filename=", ruby_xml_sax_parser_filename_set, 1); rb_define_method(cXMLSaxParser, "parse", ruby_xml_sax_parser_parse, 0); rb_define_method(cXMLSaxParser, "string", ruby_xml_sax_parser_str_get, 0); rb_define_method(cXMLSaxParser, "string=", ruby_xml_sax_parser_str_set, 1); rb_define_method(cXMLSaxParser, "on_internal_subset", ruby_xml_sax_parser_on_internal_subset, -1); rb_define_method(cXMLSaxParser, "on_is_standalone", ruby_xml_sax_parser_on_is_standalone, -1); rb_define_method(cXMLSaxParser, "on_has_internal_subset", ruby_xml_sax_parser_on_has_internal_subset, -1); rb_define_method(cXMLSaxParser, "on_has_external_subset", ruby_xml_sax_parser_on_has_external_subset, -1); rb_define_method(cXMLSaxParser, "on_start_document", ruby_xml_sax_parser_on_start_document, -1); rb_define_method(cXMLSaxParser, "on_end_document", ruby_xml_sax_parser_on_end_document, -1); rb_define_method(cXMLSaxParser, "on_start_element", ruby_xml_sax_parser_on_start_element, -1); rb_define_method(cXMLSaxParser, "on_end_element", ruby_xml_sax_parser_on_end_element, -1); rb_define_method(cXMLSaxParser, "on_reference", ruby_xml_sax_parser_on_reference, -1); rb_define_method(cXMLSaxParser, "on_characters", ruby_xml_sax_parser_on_characters, -1); rb_define_method(cXMLSaxParser, "on_processing_instruction", ruby_xml_sax_parser_on_processing_instruction, -1); rb_define_method(cXMLSaxParser, "on_comment", ruby_xml_sax_parser_on_comment, -1); rb_define_method(cXMLSaxParser, "on_parser_warning", ruby_xml_sax_parser_on_parser_warning, -1); rb_define_method(cXMLSaxParser, "on_parser_error", ruby_xml_sax_parser_on_parser_error, -1); rb_define_method(cXMLSaxParser, "on_parser_fatal_error", ruby_xml_sax_parser_on_parser_fatal_error, -1); rb_define_method(cXMLSaxParser, "on_cdata_block", ruby_xml_sax_parser_on_cdata_block, -1); rb_define_method(cXMLSaxParser, "on_external_subset", ruby_xml_sax_parser_on_external_subset, -1); } ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_parser.h0000644000000000000000000000166611672453175026435 0ustar rootroot/* $Id: ruby_xml_parser.h,v 1.1 2006/02/21 20:40:16 roscopeco Exp $ */ /* Please see the LICENSE file for copyright and distribution information */ #ifndef __RUBY_XML_PARSER__ #define __RUBY_XML_PARSER__ #define MAX_LIBXML_FEATURES_LEN 50 extern int ruby_xml_parser_count; extern VALUE cXMLParser; extern VALUE eXMLParserParseError; typedef struct ruby_xml_parser { VALUE ctxt; int parsed; void *data; int data_type; } ruby_xml_parser; VALUE ruby_xml_parser_default_load_external_dtd_set(VALUE class, VALUE bool); VALUE ruby_xml_parser_default_substitute_entities_set(VALUE class, VALUE bool); VALUE ruby_xml_parser_features(VALUE self); VALUE ruby_xml_parser_filename_get(VALUE self); VALUE ruby_xml_parser_filename_set(VALUE self, VALUE filename); VALUE ruby_xml_parser_new(VALUE class); VALUE ruby_xml_parser_parse(VALUE self); VALUE ruby_xml_parser_str_get(VALUE 
self); VALUE ruby_xml_parser_str_set(VALUE self, VALUE str); #endif ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_attr.h0000644000000000000000000000107711672453175026107 0ustar rootroot/* $Id: ruby_xml_attr.h,v 1.1 2006/02/21 20:40:16 roscopeco Exp $ */ /* Please see the LICENSE file for copyright and distribution information */ #ifndef __RUBY_XML_ATTR__ #define __RUBY_XML_ATTR__ extern VALUE cXMLAttr; typedef struct ruby_xml_attr { xmlAttrPtr attr; VALUE xd; int is_ptr; } ruby_xml_attr; void ruby_xml_attr_free(ruby_xml_attr *rxn); void ruby_init_xml_attr(void); VALUE ruby_xml_attr_new(VALUE class, VALUE xd, xmlAttrPtr attr); VALUE ruby_xml_attr_new2(VALUE class, VALUE xd, xmlAttrPtr attr); VALUE ruby_xml_attr_name_get(VALUE self); #endif ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_node.h0000644000000000000000000000160511672453175026057 0ustar rootroot/* $Id: ruby_xml_node.h,v 1.1 2006/02/21 20:40:16 roscopeco Exp $ */ /* Please see the LICENSE file for copyright and distribution information */ #ifndef __RUBY_XML_NODE__ #define __RUBY_XML_NODE__ extern VALUE cXMLNode; extern VALUE eXMLNodeSetNamespace; extern VALUE eXMLNodeFailedModify; extern VALUE eXMLNodeUnknownType; typedef struct ruby_xml_node { xmlNodePtr node; VALUE xd; int is_ptr; } ruby_xml_node; void ruby_xml_node_free(ruby_xml_node *rxn); void ruby_init_xml_node(void); VALUE ruby_xml_node_child_set(VALUE self, VALUE obj); VALUE ruby_xml_node_new(VALUE class, xmlNodePtr node); VALUE ruby_xml_node_new2(VALUE class, VALUE xd, xmlNodePtr node); VALUE ruby_xml_node_name_get(VALUE self); VALUE ruby_xml_node_property_get(VALUE self, VALUE key); VALUE ruby_xml_node_property_set(VALUE self, VALUE key, VALUE val); VALUE ruby_xml_node_set_ptr(VALUE node, int is_ptr); #endif ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_xinclude.h0000644000000000000000000000047411672453175026750 0ustar rootroot/* $Id: ruby_xml_xinclude.h,v 1.1 2006/02/21 20:40:16 roscopeco Exp $ */ /* Please see the LICENSE file for copyright and distribution information */ #ifndef __RUBY_XML_XINCLUDE__ #define __RUBY_XML_XINCLUDE__ extern VALUE cXMLXInclude; extern VALUE eXMLXIncludeError; void ruby_init_xml_xinclude(void); #endif ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_tree.h0000644000000000000000000000041011672453175026062 0ustar rootroot/* $Id: ruby_xml_tree.h,v 1.1 2006/02/21 20:40:16 roscopeco Exp $ */ /* Please see the LICENSE file for copyright and distribution information */ #ifndef __RUBY_XML_TREE__ #define __RUBY_XML_TREE__ extern VALUE cXMLTree; void ruby_init_xml_tree(void); #endif ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_xpath_context.c0000644000000000000000000000552411672453175030021 0ustar rootroot/* $Id: ruby_xml_xpath_context.c,v 1.2 2006/02/27 12:55:32 roscopeco Exp $ */ /* Please see the LICENSE file for copyright and distribution information */ #include "libxml.h" #include "ruby_xml_xpath_context.h" VALUE cXMLXPathContext; /* * call-seq: * context.doc => document * * Obtain the XML::Document associated with this XPath. 
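 *
 * Sketch (assumes +ctxt+ is an XML::XPath::Context; the prefix and URI are
 * examples only):
 *
 *   ctxt.register_namespace("ex", "http://example.org/ns")   # => true
 *   ctxt.doc                                                 # => the owning XML::Document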
*/ VALUE ruby_xml_xpath_context_doc_get(VALUE self) { ruby_xml_xpath_context *rxxpc; Data_Get_Struct(self, ruby_xml_xpath_context, rxxpc); return(rxxpc->xd); } void ruby_xml_xpath_context_free(ruby_xml_xpath_context *rxxpc) { if (rxxpc->ctxt != NULL) { xmlXPathFreeContext(rxxpc->ctxt); rxxpc->ctxt = NULL; } free(rxxpc); } void ruby_xml_xpath_context_mark(ruby_xml_xpath_context *rxxpc) { if (rxxpc == NULL) return; if (!NIL_P(rxxpc->xd)) rb_gc_mark(rxxpc->xd); } VALUE ruby_xml_xpath_context_new(VALUE class, VALUE xd, xmlXPathContextPtr xxpc) { ruby_xml_xpath_context *rxxpc; rxxpc = ALLOC(ruby_xml_xpath_context); rxxpc->ctxt = xxpc; rxxpc->xd = xd; return(Data_Wrap_Struct(class, ruby_xml_xpath_context_mark, ruby_xml_xpath_context_free, rxxpc)); } VALUE ruby_xml_xpath_context_new2(VALUE xd, xmlXPathContextPtr xxpc) { return(ruby_xml_xpath_context_new(cXMLXPathContext, xd, xxpc)); } VALUE ruby_xml_xpath_context_new3(VALUE xd) { ruby_xml_document *rxd; xmlXPathContextPtr ctxt; Data_Get_Struct(xd, ruby_xml_document, rxd); if (rxd->doc == NULL) return(Qnil); ctxt = xmlXPathNewContext(rxd->doc); if (ctxt == NULL) return(Qnil); return(ruby_xml_xpath_context_new2(xd, ctxt)); } VALUE ruby_xml_xpath_context_new4(VALUE rnode) { ruby_xml_node *node; Data_Get_Struct(rnode, ruby_xml_node, node); return(ruby_xml_xpath_context_new3(node->xd)); } /* * call-seq: * context.register_namespace(prefix, uri) => (true|false) * * Register the specified namespace URI with the specified prefix * in this context. */ VALUE ruby_xml_xpath_context_register_namespace(VALUE self, VALUE prefix, VALUE uri) { ruby_xml_xpath_context *rxxpc; Data_Get_Struct(self, ruby_xml_xpath_context, rxxpc); if (xmlXPathRegisterNs(rxxpc->ctxt, (xmlChar*)StringValuePtr(prefix), (xmlChar*)StringValuePtr(uri)) == 0) { return(Qtrue); } else { /* Should raise an exception, IMHO */ return(Qfalse); } } // Rdoc needs to know #ifdef RDOC_NEVER_DEFINED mXML = rb_define_module("XML"); cXMLXPath = rb_define_class_under(mXML, "XPath", rb_cObject); #endif void ruby_init_xml_xpath_context(void) { cXMLXPathContext = rb_define_class_under(cXMLXPath, "Context", rb_cObject); rb_define_method(cXMLXPathContext, "register_namespace", ruby_xml_xpath_context_register_namespace, 2); rb_define_method(cXMLXPathContext, "doc", ruby_xml_xpath_context_doc_get, 0); } ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_input_cbg.h0000644000000000000000000000053611672453175027106 0ustar rootroot#ifndef _INPUT_CBG_ #define _INPUT_CBG_ void ruby_init_input_callbacks(void); typedef struct ic_doc_context { char *buffer; char *bpos; int remaining; } ic_doc_context; typedef struct ic_scheme { char *scheme_name; //MUFF char *class; VALUE class; int name_len; struct ic_scheme *next_scheme; } ic_scheme; #endif ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_xpointer.h0000644000000000000000000000127011672453175027000 0ustar rootroot/* $Id: ruby_xml_xpointer.h,v 1.1 2006/02/21 20:40:16 roscopeco Exp $ */ /* Please see the LICENSE file for copyright and distribution information */ #ifndef __RUBY_XML_XPOINTER__ #define __RUBY_XML_XPOINTER__ extern VALUE cXMLXPointer; extern VALUE eXMLXPointerInvalidExpression; typedef struct ruby_xml_xpointer { VALUE xd; VALUE ctxt; /* * This needs to go into a xpointer data struct: * * xmlLocationSetPtr xptr; * * I also need an xpointer data struct type. 
*/ } ruby_xml_xpointer; VALUE ruby_xml_xpointer_point(VALUE class, VALUE node, VALUE xptr_string); VALUE ruby_xml_xpointer_point2(VALUE node, VALUE xptr_string); void ruby_init_xml_xpointer(void); #endif ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/extconf.rb0000644000000000000000000000236011672453175025032 0ustar rootrootrequire '../../../../../lib/mkrf' def crash(str) printf(" extconf failure: %s\n", str) exit 1 end Mkrf::Generator.new('libxml_so', '*.c') do |g| g.include_library('socket','socket') g.include_library('nsl','gethostbyname') unless g.include_library('z', 'inflate') crash('need zlib') else g.add_define('HAVE_ZLIB_H') end unless g.include_library('iconv','iconv_open') or g.include_library('c','iconv_open') or g.include_library('recode','iconv_open') or g.include_library('iconv') crash(<<-EOL) need libiconv. Install the libiconv or try passing one of the following options to extconf.rb: --with-iconv-dir=/path/to/iconv --with-iconv-lib=/path/to/iconv/lib --with-iconv-include=/path/to/iconv/include EOL end g.include_library('xml2', 'xmlParseDoc') has_header = g.include_header('libxml/xmlversion.h', '/opt/include/libxml2', '/usr/local/include/libxml2', '/usr/include/libxml2') unless g.include_library('xml2', 'xmlDocFormatDump') crash('Your version of libxml2 is too old. Please upgrade.') end unless g.has_function? 'docbCreateFileParserCtxt' crash('Need docbCreateFileParserCtxt') end endruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/libxml.h0000644000000000000000000000410411672453175024475 0ustar rootroot/* Please see the LICENSE file for copyright and distribution information */ #ifndef __RUBY_LIBXML_H__ #define __RUBY_LIBXML_H__ /* Don't nuke this block! It is used for automatically updating the * versions below. VERSION = string formatting, VERNUM = numbered * version for inline testing: increment both or none at all. 
*/ #define RUBY_LIBXML_VERSION "0.3.8" #define RUBY_LIBXML_VERNUM 38 #define RUBY_LIBXML_VER_MAJ 0 #define RUBY_LIBXML_VER_MIN 3 #define RUBY_LIBXML_VER_MIC 8 #include #include #include #include #include #include #include #include #include #include #include #ifdef LIBXML_DEBUG_ENABLED #include #endif #ifdef LIBXML_XINCLUDE_ENABLED #include #endif #ifdef LIBXML_XPTR_ENABLED #include #endif #define RUBY_LIBXML_SRC_TYPE_NULL 0 #define RUBY_LIBXML_SRC_TYPE_FILE 1 #define RUBY_LIBXML_SRC_TYPE_STRING 2 #define RUBY_LIBXML_SRC_TYPE_IO 3 #define RUBY_LIBXML_SRC_TYPE_XPATH 4 typedef struct rx_file_data { VALUE filename; /* Filename/path to self */ } rx_file_data; typedef struct rx_io_data { VALUE io; } rx_io_data; typedef struct rx_string_data { VALUE str; } rx_string_data; typedef struct rx_xpath_data { VALUE ctxt; } rx_xpath_data; #include "ruby_xml_attr.h" #include "ruby_xml_attribute.h" #include "ruby_xml_document.h" #include "ruby_xml_node.h" #include "ruby_xml_node_set.h" #include "ruby_xml_ns.h" #include "ruby_xml_parser.h" #include "ruby_xml_parser_context.h" #include "ruby_xml_sax_parser.h" #include "ruby_xml_tree.h" #include "ruby_xml_xinclude.h" #include "ruby_xml_xpath.h" #include "ruby_xml_xpath_context.h" #include "ruby_xml_xpointer.h" #include "ruby_xml_xpointer_context.h" #include "ruby_xml_input_cbg.h" #include "ruby_xml_dtd.h" #include "ruby_xml_schema.h" extern VALUE mXML; void ruby_init_parser(void); void ruby_xml_parser_free(ruby_xml_parser *rxp); #endif ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_dtd.c0000644000000000000000000001106211672453175025676 0ustar rootroot#include "libxml.h" #include "ruby_xml_dtd.h" VALUE cXMLDtd; void ruby_xml_dtd_free(ruby_xml_dtd *rxdtd) { if (rxdtd->dtd != NULL) { xmlFreeDtd(rxdtd->dtd); rxdtd->dtd = NULL; } free(rxdtd); } static void ruby_xml_dtd_mark(ruby_xml_dtd *rxdtd) { return; //if (rxdtd == NULL) return; //if (!NIL_P(rxd->xmlver)) rb_gc_mark(rxd->xmlver); } /* * call-seq: * XML::Dtd.new("public system") => dtd * XML::Dtd.new("public", "system") => dtd * * Create a new Dtd from the specified public and system * identifiers. 
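 * * A hedged sketch of both documented forms (the identifiers and file name are illustrative only, not shipped with this package): * *   XML::Dtd.new('-//ACME//DTD Example 1.0//EN', 'example.dtd')  # look the DTD up by its public/system identifiers * *   XML::Dtd.new(IO.read('example.dtd'))                         # single string argument: parsed as inline DTD content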
*/ VALUE ruby_xml_dtd_initialize(int argc, VALUE *argv, VALUE class) { ruby_xml_dtd *rxdtd; VALUE external, system, dtd_string; xmlParserInputBufferPtr buffer; xmlCharEncoding enc = XML_CHAR_ENCODING_NONE; xmlChar *new_string; // 1 argument -- string --> parse it as a DTD // 2 arguments -- public, system --> the DTD will be looked up switch (argc) { case 2: rb_scan_args(argc, argv, "20", &external, &system); Check_Type(external, T_STRING); Check_Type(system, T_STRING); rxdtd = ALLOC(ruby_xml_dtd); rxdtd->dtd = xmlParseDTD( (xmlChar*)StringValuePtr(external), (xmlChar*)StringValuePtr(system) ); if (rxdtd->dtd == NULL) { free(rxdtd); return(Qfalse); } xmlSetTreeDoc( (xmlNodePtr)rxdtd->dtd, NULL ); return( Data_Wrap_Struct(cXMLDtd, ruby_xml_dtd_mark, ruby_xml_dtd_free, rxdtd) ); break; /* SV * new(CLASS, external, system) char * CLASS char * external char * system ALIAS: parse_uri = 1 PREINIT: xmlDtdPtr dtd = NULL; CODE: LibXML_error = sv_2mortal(newSVpv("", 0)); dtd = xmlParseDTD((const xmlChar*)external, (const xmlChar*)system); if ( dtd == NULL ) { XSRETURN_UNDEF; } xmlSetTreeDoc((xmlNodePtr)dtd, NULL); RETVAL = PmmNodeToSv( (xmlNodePtr) dtd, NULL ); OUTPUT: RETVAL */ case 1: rb_scan_args(argc, argv, "10", &dtd_string); buffer = xmlAllocParserInputBuffer(enc); //if ( !buffer) return Qnil new_string = xmlStrdup((xmlChar*)StringValuePtr(dtd_string)); xmlParserInputBufferPush(buffer, xmlStrlen(new_string), (const char*)new_string); rxdtd = ALLOC(ruby_xml_dtd); rxdtd->dtd = xmlIOParseDTD(NULL, buffer, enc); // NOTE: For some reason freeing this InputBuffer causes a segfault! // xmlFreeParserInputBuffer(buffer); xmlFree(new_string); return( Data_Wrap_Struct(cXMLDtd, ruby_xml_dtd_mark, ruby_xml_dtd_free, rxdtd) ); break; /* SV * parse_string(CLASS, str, ...) char * CLASS char * str PREINIT: STRLEN n_a; xmlDtdPtr res; SV * encoding_sv; xmlParserInputBufferPtr buffer; xmlCharEncoding enc = XML_CHAR_ENCODING_NONE; xmlChar * new_string; STRLEN len; CODE: LibXML_init_error(); if (items > 2) { encoding_sv = ST(2); if (items > 3) { croak("parse_string: too many parameters"); } // warn("getting encoding...\n"); enc = xmlParseCharEncoding(SvPV(encoding_sv, n_a)); if (enc == XML_CHAR_ENCODING_ERROR) { croak("Parse of encoding %s failed: %s", SvPV(encoding_sv, n_a), SvPV(LibXML_error, n_a)); } } buffer = xmlAllocParserInputBuffer(enc); // buffer = xmlParserInputBufferCreateMem(str, xmlStrlen(str), enc); if ( !buffer) croak("cant create buffer!\n" ); new_string = xmlStrdup((const xmlChar*)str); xmlParserInputBufferPush(buffer, xmlStrlen(new_string), (const char*)new_string); res = xmlIOParseDTD(NULL, buffer, enc); // NOTE: For some reason freeing this InputBuffer causes a segfault!
// xmlFreeParserInputBuffer(buffer); xmlFree(new_string); sv_2mortal( LibXML_error ); LibXML_croak_error(); if (res == NULL) { croak("no DTD parsed!"); } RETVAL = PmmNodeToSv((xmlNodePtr)res, NULL); OUTPUT: RETVAL */ default: rb_raise(rb_eArgError, "wrong number of arguments (need 1 or 2)"); } //docobj = ruby_xml_document_new2(cXMLDocument, xmlver); return Qnil; } // Rdoc needs to know #ifdef RDOC_NEVER_DEFINED mXML = rb_define_module("XML"); #endif void ruby_init_xml_dtd(void) { cXMLDtd = rb_define_class_under(mXML, "Dtd", rb_cObject); rb_define_singleton_method(cXMLDtd, "new", ruby_xml_dtd_initialize, -1); //rb_define_method(cXMLDocument, "xinclude", ruby_xml_document_xinclude, 0); } ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_attr.c0000644000000000000000000001723211672453175026102 0ustar rootroot/* $Id: ruby_xml_attr.c,v 1.1 2006/02/21 20:40:16 roscopeco Exp $ */ /* Please see the LICENSE file for copyright and distribution information */ #include "libxml.h" #include "ruby_xml_attr.h" VALUE cXMLAttr; void ruby_xml_attr_free(ruby_xml_attr *rxa) { if (rxa->attr != NULL && !rxa->is_ptr) { xmlUnlinkNode((xmlNodePtr)rxa->attr); xmlFreeNode((xmlNodePtr)rxa->attr); rxa->attr = NULL; } free(rxa); } /* * call-seq: * attr.child => node * * Obtain this attribute's child attribute(s). */ VALUE ruby_xml_attr_child_get(VALUE self) { ruby_xml_attr *rxa; Data_Get_Struct(self, ruby_xml_attr, rxa); if (rxa->attr->children == NULL) return(Qnil); else return(ruby_xml_node_new2(cXMLNode, rxa->xd, rxa->attr->children)); } /* * call-seq: * attr.child? => (true|false) * * Determine whether this attribute has child attributes. */ VALUE ruby_xml_attr_child_q(VALUE self) { ruby_xml_attr *rxa; Data_Get_Struct(self, ruby_xml_attr, rxa); if (rxa->attr->children == NULL) return(Qfalse); else return(Qtrue); } /* * call-seq: * attr.doc => document * * Obtain the XML::Document this attribute is associated with, * if any. */ VALUE ruby_xml_attr_doc_get(VALUE self) { ruby_xml_attr *rxa; Data_Get_Struct(self, ruby_xml_attr, rxa); if (rxa->attr->doc == NULL) return(Qnil); else return(ruby_xml_document_new(cXMLDocument, rxa->attr->doc)); } /* * call-seq: * attr.doc? => (true|false) * * Determine whether this attribute is associated with an * XML::Document. */ VALUE ruby_xml_attr_doc_q(VALUE self) { ruby_xml_attr *rxa; Data_Get_Struct(self, ruby_xml_attr, rxa); if (rxa->attr->doc == NULL) return(Qfalse); else return(Qtrue); } /* * call-seq: * attr.last => node * * Obtain the last attribute. */ VALUE ruby_xml_attr_last_get(VALUE self) { ruby_xml_attr *rxa; Data_Get_Struct(self, ruby_xml_attr, rxa); if (rxa->attr->last == NULL) return(Qnil); else return(ruby_xml_node_new2(cXMLNode, rxa->xd, rxa->attr->last)); } /* * call-seq: * attr.last? => (true|false) * * Determine whether this is the last attribute. */ VALUE ruby_xml_attr_last_q(VALUE self) { ruby_xml_attr *rxa; Data_Get_Struct(self, ruby_xml_attr, rxa); if (rxa->attr->last == NULL) return(Qfalse); else return(Qtrue); } static void ruby_xml_attr_mark(ruby_xml_attr *rxa) { if (rxa == NULL) return; if (!NIL_P(rxa->xd)) rb_gc_mark(rxa->xd); } /* * call-seq: * attr.name => "name" * * Obtain this attribute's name. 
*/ VALUE ruby_xml_attr_name_get(VALUE self) { ruby_xml_attr *rxa; Data_Get_Struct(self, ruby_xml_attr, rxa); if (rxa->attr->name == NULL) return(Qnil); else return(rb_str_new2((const char*)rxa->attr->name)); } VALUE ruby_xml_attr_new(VALUE class, VALUE xd, xmlAttrPtr attr) { ruby_xml_attr *rxa; rxa = ALLOC(ruby_xml_attr); rxa->attr = attr; rxa->xd = xd; rxa->is_ptr = 0; return(Data_Wrap_Struct(class, ruby_xml_attr_mark, ruby_xml_attr_free, rxa)); } VALUE ruby_xml_attr_new2(VALUE class, VALUE xd, xmlAttrPtr attr) { ruby_xml_attr *rxa; rxa = ALLOC(ruby_xml_attr); rxa->attr = xmlCopyProp(attr->parent, attr); rxa->xd = xd; rxa->is_ptr = 0; return(Data_Wrap_Struct(class, ruby_xml_attr_mark, ruby_xml_attr_free, rxa)); } /* * call-seq: * attr.next => node * * Obtain the next attribute. */ VALUE ruby_xml_attr_next_get(VALUE self) { ruby_xml_attr *rxa; Data_Get_Struct(self, ruby_xml_attr, rxa); if (rxa->attr->next == NULL) return(Qnil); else return(ruby_xml_attr_new(cXMLAttr, rxa->xd, rxa->attr->next)); } /* * call-seq: * attr.next? => (true|false) * * Determine whether there is a next attribute. */ VALUE ruby_xml_attr_next_q(VALUE self) { ruby_xml_attr *rxa; Data_Get_Struct(self, ruby_xml_attr, rxa); if (rxa->attr->next == NULL) return(Qfalse); else return(Qtrue); } /* * call-seq: * attr.type_name => "attribute" * * Obtain this attribute node's type name. */ VALUE ruby_xml_attr_node_type_name(VALUE self) { /* I think libxml2's naming convention blows monkey ass */ return(rb_str_new2("attribute")); } /* * call-seq: * attr.ns => namespace * * Obtain this attribute's associated XML::NS, if any. */ VALUE ruby_xml_attr_ns_get(VALUE self) { ruby_xml_attr *rxa; Data_Get_Struct(self, ruby_xml_attr, rxa); if (rxa->attr->ns == NULL) return(Qnil); else return(ruby_xml_ns_new2(cXMLNS, rxa->xd, rxa->attr->ns)); } /* * call-seq: * attr.ns? => (true|false) * * Determine whether this attribute has an associated * namespace. */ VALUE ruby_xml_attr_ns_q(VALUE self) { ruby_xml_attr *rxa; Data_Get_Struct(self, ruby_xml_attr, rxa); if (rxa->attr->ns == NULL) return(Qfalse); else return(Qtrue); } /* * call-seq: * attr.parent => node * * Obtain this attribute node's parent. */ VALUE ruby_xml_attr_parent_get(VALUE self) { ruby_xml_attr *rxa; Data_Get_Struct(self, ruby_xml_attr, rxa); if (rxa->attr->parent == NULL) return(Qnil); else return(ruby_xml_node_new2(cXMLNode, rxa->xd, rxa->attr->parent)); } /* * call-seq: * attr.parent? => (true|false) * * Determine whether this attribute has a parent. */ VALUE ruby_xml_attr_parent_q(VALUE self) { ruby_xml_attr *rxa; Data_Get_Struct(self, ruby_xml_attr, rxa); if (rxa->attr->parent == NULL) return(Qfalse); else return(Qtrue); } /* * call-seq: * attr.prev => node * * Obtain the previous attribute. */ VALUE ruby_xml_attr_prev_get(VALUE self) { ruby_xml_attr *rxa; Data_Get_Struct(self, ruby_xml_attr, rxa); if (rxa->attr->prev == NULL) return(Qnil); else return(ruby_xml_attr_new(cXMLAttr, rxa->xd, rxa->attr->prev)); } /* * call-seq: * attr.prev? => (true|false) * * Determine whether there is a previous attribute. */ VALUE ruby_xml_attr_prev_q(VALUE self) { ruby_xml_attr *rxa; Data_Get_Struct(self, ruby_xml_attr, rxa); if (rxa->attr->prev == NULL) return(Qfalse); else return(Qtrue); } /* * call-seq: * attr.value => "value" * * Obtain the value of this attribute. 
*/ VALUE ruby_xml_attr_value(VALUE self) { ruby_xml_attr *rxa; xmlChar *value; Data_Get_Struct(self, ruby_xml_attr, rxa); if (ruby_xml_attr_parent_q(self) == Qtrue) { value = xmlGetProp(rxa->attr->parent, rxa->attr->name); if (value != NULL) return(rb_str_new2((const char*)value)); } return(Qnil); } // Rdoc needs to know #ifdef RDOC_NEVER_DEFINED mXML = rb_define_module("XML"); #endif void ruby_init_xml_attr(void) { cXMLAttr = rb_define_class_under(mXML, "Attr", rb_cObject); rb_define_method(cXMLAttr, "child", ruby_xml_attr_child_get, 0); rb_define_method(cXMLAttr, "child?", ruby_xml_attr_child_q, 0); rb_define_method(cXMLAttr, "doc", ruby_xml_attr_doc_get, 0); rb_define_method(cXMLAttr, "doc?", ruby_xml_attr_doc_q, 0); rb_define_method(cXMLAttr, "last", ruby_xml_attr_last_get, 0); rb_define_method(cXMLAttr, "last?", ruby_xml_attr_last_q, 0); rb_define_method(cXMLAttr, "name", ruby_xml_attr_name_get, 0); rb_define_method(cXMLAttr, "next", ruby_xml_attr_next_get, 0); rb_define_method(cXMLAttr, "next?", ruby_xml_attr_next_q, 0); rb_define_method(cXMLAttr, "node_type_name", ruby_xml_attr_node_type_name, 0); rb_define_method(cXMLAttr, "ns", ruby_xml_attr_ns_get, 0); rb_define_method(cXMLAttr, "ns?", ruby_xml_attr_ns_q, 0); rb_define_method(cXMLAttr, "parent", ruby_xml_attr_parent_get, 0); rb_define_method(cXMLAttr, "parent?", ruby_xml_attr_parent_q, 0); rb_define_method(cXMLAttr, "prev", ruby_xml_attr_prev_get, 0); rb_define_method(cXMLAttr, "prev?", ruby_xml_attr_prev_q, 0); rb_define_method(cXMLAttr, "value", ruby_xml_attr_value, 0); } ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_xinclude.c0000644000000000000000000000102711672453175026736 0ustar rootroot/* $Id: ruby_xml_xinclude.c,v 1.1 2006/02/21 20:40:16 roscopeco Exp $ */ /* Please see the LICENSE file for copyright and distribution information */ #include "libxml.h" #include "ruby_xml_xinclude.h" VALUE cXMLXInclude; VALUE eXMLXIncludeError; // Rdoc needs to know #ifdef RDOC_NEVER_DEFINED mXML = rb_define_module("XML"); #endif void ruby_init_xml_xinclude(void) { cXMLXInclude = rb_define_class_under(mXML, "XInclude", rb_cObject); eXMLXIncludeError = rb_define_class_under(cXMLXInclude, "Error", rb_eRuntimeError); } ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_parser_context.h0000644000000000000000000000126511672453175030174 0ustar rootroot/* $Id: ruby_xml_parser_context.h,v 1.1 2006/02/21 20:40:16 roscopeco Exp $ */ /* Please see the LICENSE file for copyright and distribution information */ #ifndef __RUBY_XML_PARSER_CONTEXT__ #define __RUBY_XML_PARSER_CONTEXT__ extern VALUE cXMLParserContext; typedef struct ruby_xml_parser_context { xmlParserCtxtPtr ctxt; int is_ptr; } ruby_xml_parser_context; void ruby_xml_parser_context_free(ruby_xml_parser_context *ctxt); void ruby_init_xml_parser_context(void); VALUE ruby_xml_parser_context_new(VALUE class, xmlParserCtxtPtr ctxt); VALUE ruby_xml_parser_context_new2(VALUE class); VALUE ruby_xml_parser_context_new3(); VALUE ruby_xml_parser_context_each(VALUE self); #endif ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/old_extconf.rb0000644000000000000000000000426111672453175025672 0ustar rootroot#!/usr/bin/env ruby require 'mkmf' if defined?(CFLAGS) if CFLAGS.index(CONFIG['CCDLFLAGS']) $CFLAGS = CFLAGS else $CFLAGS = CFLAGS + ' ' + CONFIG['CCDLFLAGS'] end else $CFLAGS = CONFIG['CFLAGS'] end $LDFLAGS = CONFIG['LDFLAGS'] $LIBPATH.push(Config::CONFIG['libdir']) def crash(str) printf(" extconf failure: %s\n", str) exit 1 end 
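# Note: mkmf's dir_config calls below add --with-iconv-dir / --with-iconv-include / --with-iconv-lib style configure options (and likewise for xml2 and zlib) for locating each library -- the same search paths the mkrf-based extconf.rb above spells out explicitly for include_header.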
dir_config('iconv') dir_config('xml2') dir_config('zlib') have_library('socket','socket') have_library('nsl','gethostbyname') unless have_library('m', 'atan') # try again for gcc 4.0 saveflags = $CFLAGS $CFLAGS += ' -fno-builtin' unless have_library('m', 'atan') crash('need libm') end $CFLAGS = saveflags end unless have_library('z', 'inflate') crash('need zlib') else $defs.push('-DHAVE_ZLIB_H') end unless have_library('iconv','iconv_open') or have_library('c','iconv_open') or have_library('recode','iconv_open') or have_library('iconv') crash(< (true|false) * * Dump libxml debugging information to stdout. * Requires Libxml be compiled with debugging enabled. */ VALUE ruby_xml_xpath_debug(VALUE self) { #ifdef LIBXML_DEBUG_ENABLED ruby_xml_xpath *rxxp; Data_Get_Struct(self, ruby_xml_xpath, rxxp); if (rxxp->xpop != NULL) { xmlXPathDebugDumpObject(stdout, rxxp->xpop, 0); return(Qtrue); } else { return(Qfalse); } #else rb_warn("libxml does not have debugging turned on"); return(Qfalse); #endif } // TODO Maybe we should support [] or some other kind of access if poss. /* * call-seq: * xpath.each { |node| ... } => self * * Call the supplied block for each matching node. */ VALUE ruby_xml_xpath_each(VALUE self) { ruby_xml_xpath *rxxp; VALUE rxnset; Data_Get_Struct(self, ruby_xml_xpath, rxxp); if (rxxp->xpop == NULL || rxxp->xpop->type != XPATH_NODESET) return(Qnil); rxnset = ruby_xml_node_set_new(cXMLNodeSet, rxxp->xd, self, rxxp->xpop->nodesetval); ruby_xml_node_set_each(rxnset); return(rxnset); } /////////////////////////////////////////////////// // TODO xpath_find is throwing TypeError: // // TypeError: can't convert nil into String // // When given a namespace when non exist. /* * call-seq: * XML::XPath.find(path, namespaces = [any]) => xpath * * Find nodes matching the specified xpath (and optionally any of the * supplied namespaces) and return as an XML::Node::Set. * * The optional namespaces argument may take one of * two forms: * * * A string in the form of: "prefix:uri", or * * An array of: * * strings in the form like above * * arrays in the form of ['prefix','uri'] * * If not specified, matching nodes from any namespace * will be included. */ VALUE ruby_xml_xpath_find(int argc, VALUE *argv, VALUE class) { #ifdef LIBXML_XPATH_ENABLED xmlXPathCompExprPtr comp; ruby_xml_node *node; ruby_xml_xpath *rxxp; ruby_xml_xpath_context *rxxpc; ruby_xml_ns *rxns; VALUE rnode, rprefix, ruri, xxpc, xpath, xpath_expr; char *cp; long i; switch(argc) { case 3: /* array of namespaces we allow. 
* * Accept either: * A string in the form of: "prefix:uri", or * An array of: * *) strings in the form like above * *) arrays in the form of ['prefix','uri'] */ /* Intentionally fall through, we deal with the last arg below * after the XPathContext object has been setup */ case 2: rnode = argv[0]; xpath_expr = argv[1]; break; default: rb_raise(rb_eArgError, "wrong number of arguments (1 or 2)"); } Data_Get_Struct(rnode, ruby_xml_node, node); xxpc = ruby_xml_xpath_context_new4(rnode); if (NIL_P(xxpc)) return(Qnil); Data_Get_Struct(xxpc, ruby_xml_xpath_context, rxxpc); xpath = ruby_xml_xpath_new(cXMLXPath, rnode, xxpc, NULL); Data_Get_Struct(xpath, ruby_xml_xpath, rxxp); rxxpc->ctxt->node = node->node; if (node->node->type == XML_DOCUMENT_NODE) { rxxpc->ctxt->namespaces = xmlGetNsList(node->node->doc, xmlDocGetRootElement(node->node->doc)); } else { rxxpc->ctxt->namespaces = xmlGetNsList(node->node->doc, node->node); } rxxpc->ctxt->nsNr = 0; if (rxxpc->ctxt->namespaces != NULL) { while (rxxpc->ctxt->namespaces[rxxpc->ctxt->nsNr] != NULL) rxxpc->ctxt->nsNr++; } /* Need to loop through the 2nd argument and iterate through the * list of namespaces that we want to allow */ if (argc == 3) { switch (TYPE(argv[2])) { case T_STRING: cp = strchr(StringValuePtr(argv[2]), (int)':'); if (cp == NULL) { rprefix = argv[2]; ruri = Qnil; } else { rprefix = rb_str_new(StringValuePtr(argv[2]), (int)((long)cp - (long)StringValuePtr(argv[2]))); ruri = rb_str_new2(&cp[1]); } /* Should test the results of this */ ruby_xml_xpath_context_register_namespace(xxpc, rprefix, ruri); break; case T_ARRAY: for (i = 0; i < RARRAY(argv[2])->len; i++) { switch (TYPE(RARRAY(argv[2])->ptr[i])) { case T_STRING: cp = strchr(StringValuePtr(RARRAY(argv[2])->ptr[i]), (int)':'); if (cp == NULL) { rprefix = RARRAY(argv[2])->ptr[i]; ruri = Qnil; } else { rprefix = rb_str_new(StringValuePtr(RARRAY(argv[2])->ptr[i]), (int)((long)cp - (long)StringValuePtr(RARRAY(argv[2])->ptr[i]))); ruri = rb_str_new2(&cp[1]); } /* Should test the results of this */ ruby_xml_xpath_context_register_namespace(xxpc, rprefix, ruri); break; case T_ARRAY: if (RARRAY(RARRAY(argv[2])->ptr[i])->len == 2) { rprefix = RARRAY(RARRAY(argv[2])->ptr[i])->ptr[0]; ruri = RARRAY(RARRAY(argv[2])->ptr[i])->ptr[1]; ruby_xml_xpath_context_register_namespace(xxpc, rprefix, ruri); } else { rb_raise(rb_eArgError, "nested array must be an array of strings, prefix and href/uri"); } break; default: if (rb_obj_is_kind_of(RARRAY(argv[2])->ptr[i], cXMLNS) == Qtrue) { Data_Get_Struct(argv[2], ruby_xml_ns, rxns); rprefix = rb_str_new2((const char*)rxns->ns->prefix); ruri = rb_str_new2((const char*)rxns->ns->href); ruby_xml_xpath_context_register_namespace(xxpc, rprefix, ruri); } else rb_raise(rb_eArgError, "Invalid argument type, only accept string, array of strings, or an array of arrays"); } } break; default: if (rb_obj_is_kind_of(argv[2], cXMLNS) == Qtrue) { Data_Get_Struct(argv[2], ruby_xml_ns, rxns); rprefix = rb_str_new2((const char*)rxns->ns->prefix); ruri = rb_str_new2((const char*)rxns->ns->href); ruby_xml_xpath_context_register_namespace(xxpc, rprefix, ruri); } else rb_raise(rb_eArgError, "Invalid argument type, only accept string, array of strings, or an array of arrays"); } } comp = xmlXPathCompile((xmlChar*)StringValuePtr(xpath_expr)); if (comp == NULL) { xmlXPathFreeCompExpr(comp); rb_raise(eXMLXPathInvalidPath, "Invalid XPath expression"); } rxxp->xpop = xmlXPathCompiledEval(comp, rxxpc->ctxt); xmlXPathFreeCompExpr(comp); if (rxxpc->ctxt->namespaces != NULL) 
xmlFree(rxxpc->ctxt->namespaces); if (rxxp->xpop == NULL) rb_raise(eXMLXPathInvalidPath, "Invalid XPath expression for this document"); if (rxxp->xpop->type != XPATH_NODESET) return(Qnil); return(ruby_xml_node_set_new2(node->xd, xpath, rxxp->xpop->nodesetval)); #else rb_warn("libxml was compiled without XPath support"); return(Qfalse); #endif } VALUE ruby_xml_xpath_find2(int argc, VALUE *argv) { return(ruby_xml_xpath_find(argc, argv, cXMLXPath)); } void ruby_xml_xpath_free(ruby_xml_xpath *rxxp) { if (rxxp->xpop != NULL) { xmlXPathFreeObject(rxxp->xpop); rxxp->xpop = NULL; } free(rxxp); } void ruby_xml_xpath_mark(ruby_xml_xpath *rxxp) { if (rxxp == NULL) return; if (!NIL_P(rxxp->ctxt)) rb_gc_mark(rxxp->ctxt); if (!NIL_P(rxxp->xd)) rb_gc_mark(rxxp->xd); } VALUE ruby_xml_xpath_new(VALUE class, VALUE xd, VALUE ctxt, xmlXPathObjectPtr xpop) { ruby_xml_xpath *rxxp; rxxp = ALLOC(ruby_xml_xpath); rxxp->ctxt = ctxt; rxxp->xd = xd; rxxp->xpop = xpop; return(Data_Wrap_Struct(class, ruby_xml_xpath_mark, ruby_xml_xpath_free, rxxp)); } /* * call-seq: * xpath.set => nodeset * * Obtain an XML::Node::Set with nodes matching this xpath. */ VALUE ruby_xml_xpath_set(VALUE self) { ruby_xml_xpath *rxxp; Data_Get_Struct(self, ruby_xml_xpath, rxxp); if (rxxp->xpop == NULL || rxxp->xpop->type != XPATH_NODESET) return(Qnil); return(ruby_xml_node_set_new(cXMLNodeSet, rxxp->xd, self, rxxp->xpop->nodesetval)); } /* * call-seq: * xpath.set_type => num * * Obtains the type identifier of this xpath * set. */ VALUE ruby_xml_xpath_set_type(VALUE self) { ruby_xml_xpath *rxxp; Data_Get_Struct(self, ruby_xml_xpath, rxxp); return(INT2FIX(rxxp->xpop->type)); } // TODO maybe 'string' should alias as 'to_s'? /* * call-seq: * xpath.string => "xpath" * * Obtain a string representation of this xpath. 
*/ VALUE ruby_xml_xpath_string(VALUE self) { ruby_xml_xpath *rxxp; Data_Get_Struct(self, ruby_xml_xpath, rxxp); if (rxxp->xpop->stringval == NULL) return(Qnil); else return(rb_str_new2((const char*)rxxp->xpop->stringval)); } // Rdoc needs to know #ifdef RDOC_NEVER_DEFINED mXML = rb_define_module("XML"); #endif void ruby_init_xml_xpath(void) { cXMLXPath = rb_define_class_under(mXML, "XPath", rb_cObject); rb_include_module(cXMLNode, rb_const_get(rb_cObject, rb_intern("Enumerable"))); eXMLXPathInvalidPath = rb_define_class_under(cXMLXPath, "InvalidPath", rb_eException); rb_define_const(cXMLXPath, "UNDEFINED", INT2NUM(XPATH_UNDEFINED)); rb_define_const(cXMLXPath, "NODESET", INT2NUM(XPATH_NODESET)); rb_define_const(cXMLXPath, "BOOLEAN", INT2NUM(XPATH_BOOLEAN)); rb_define_const(cXMLXPath, "NUMBER", INT2NUM(XPATH_NUMBER)); rb_define_const(cXMLXPath, "STRING", INT2NUM(XPATH_STRING)); rb_define_const(cXMLXPath, "POINT", INT2NUM(XPATH_POINT)); rb_define_const(cXMLXPath, "RANGE", INT2NUM(XPATH_RANGE)); rb_define_const(cXMLXPath, "LOCATIONSET", INT2NUM(XPATH_LOCATIONSET)); rb_define_const(cXMLXPath, "USERS", INT2NUM(XPATH_USERS)); rb_define_const(cXMLXPath, "XSLT_TREE", INT2NUM(XPATH_XSLT_TREE)); rb_define_singleton_method(cXMLXPath, "find", ruby_xml_xpath_find, 2); rb_define_method(cXMLXPath, "debug", ruby_xml_xpath_debug, 0); rb_define_method(cXMLXPath, "each", ruby_xml_xpath_each, 0); rb_define_method(cXMLXPath, "set", ruby_xml_xpath_set, 0); rb_define_method(cXMLXPath, "set_type", ruby_xml_xpath_set_type, 0); rb_define_method(cXMLXPath, "string", ruby_xml_xpath_string, 0); } #endif /* ifdef LIBXML_XPATH_ENABLED */ ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_input_cbg.c0000644000000000000000000001135411672453175027101 0ustar rootroot /* ruby support for custom scheme handlers */ /* Author: Martin Povolny (xpovolny@fi.muni.cz) */ #include "libxml.h" #include "ruby_xml_input_cbg.h" static ic_scheme *first_scheme = 0; int ic_match (char const *filename) { ic_scheme *scheme; //fprintf( stderr, "ic_match: %s\n", filename ); scheme = first_scheme; while (0 != scheme) { if (!xmlStrncasecmp(BAD_CAST filename, BAD_CAST scheme->scheme_name, scheme->name_len)) { return 1; } scheme = scheme->next_scheme; } return 0; } void* ic_open (char const *filename) { ic_doc_context *ic_doc; ic_scheme *scheme; VALUE res; scheme = first_scheme; while (0 != scheme) { if (!xmlStrncasecmp(BAD_CAST filename, BAD_CAST scheme->scheme_name, scheme->name_len)) { ic_doc = (ic_doc_context*)malloc( sizeof(ic_doc_context) ); // MUFF res = rb_funcall( // rb_funcall( rb_mKernel, // rb_intern("const_get"), 1, // rb_str_new2(scheme->class) ), // rb_intern("document_query"), 1, rb_str_new2(filename) ); res = rb_funcall( scheme->class, rb_intern("document_query"), 1, rb_str_new2(filename) ); ic_doc->buffer = strdup( StringValuePtr(res) ); ic_doc->bpos = ic_doc->buffer; ic_doc->remaining = strlen(ic_doc->buffer); return ic_doc; } scheme = scheme->next_scheme; } return 0; } int ic_read (void *context, char *buffer, int len) { ic_doc_context *ic_doc; int ret_len; ic_doc = (ic_doc_context*)context; if (len >= ic_doc->remaining) { ret_len = ic_doc->remaining; } else { ret_len = len; } ic_doc->remaining -= ret_len; strncpy( buffer, ic_doc->bpos, ret_len ); ic_doc->bpos += ret_len; return ret_len; } int ic_close (void *context) { free( ((ic_doc_context*)context)->buffer ); free( context ); return 1; } VALUE input_callbacks_register_input_callbacks() { xmlRegisterInputCallbacks( ic_match, ic_open, ic_read, ic_close ); 
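  /* From this point libxml routes every URI it is asked to open through ic_match; any URI whose scheme was registered via InputCallbacks.add_scheme is then opened by ic_open, which fetches the document text from the registered Ruby object's document_query method, and the result is streamed back through ic_read. */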
return(Qtrue); } VALUE input_callbacks_add_scheme (VALUE self, VALUE scheme_name, VALUE class) { ic_scheme *scheme; Check_Type(scheme_name, T_STRING); //MUFF Check_Type(class, T_STRING); scheme = (ic_scheme*)malloc(sizeof(ic_scheme)); scheme->next_scheme = 0; scheme->scheme_name = strdup(StringValuePtr(scheme_name)); /* TODO alloc, dealloc */ scheme->name_len = strlen(scheme->scheme_name); //MUFF scheme->class = strdup(StringValuePtr(class)); /* TODO alloc, dealloc */ scheme->class = class; /* TODO alloc, dealloc */ //fprintf( stderr, "registered: %s, %d, %s\n", scheme->scheme_name, scheme->name_len, scheme->class ); if (0 == first_scheme) first_scheme = scheme; else { ic_scheme *pos; pos = first_scheme; while (0 != pos->next_scheme) pos = pos->next_scheme; pos->next_scheme = scheme; } return(Qtrue); } VALUE input_callbacks_remove_scheme (VALUE self, VALUE scheme_name) { char *name; ic_scheme *save_scheme, *scheme; Check_Type(scheme_name, T_STRING); name = StringValuePtr(scheme_name); if (0 == first_scheme) return Qfalse; /* check the first one */ if (!strncmp(name, first_scheme->scheme_name, first_scheme->name_len)) { save_scheme = first_scheme->next_scheme; free(first_scheme->scheme_name); //MUFF free(first_scheme->class); free(first_scheme); first_scheme = save_scheme; return Qtrue; } scheme = first_scheme; while (0 != scheme->next_scheme) { if ( !strncmp( name, scheme->next_scheme->scheme_name, scheme->next_scheme->name_len ) ) { save_scheme = scheme->next_scheme->next_scheme; free(scheme->next_scheme->scheme_name); //MUFF free(scheme->next_scheme->class); free(scheme->next_scheme); scheme->next_scheme = save_scheme; return Qtrue; } scheme = scheme->next_scheme; } return Qfalse; } // Rdoc needs to know #ifdef RDOC_NEVER_DEFINED mXML = rb_define_module("XML"); #endif void ruby_init_input_callbacks(void) { VALUE cInputCallbacks; cInputCallbacks = rb_define_class_under(mXML, "InputCallbacks", rb_cObject); /* Class Methods */ rb_define_singleton_method(cInputCallbacks, "register", input_callbacks_register_input_callbacks, 0); rb_define_singleton_method(cInputCallbacks, "add_scheme", input_callbacks_add_scheme, 2); rb_define_singleton_method(cInputCallbacks, "remove_scheme", input_callbacks_remove_scheme, 1); } ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_parser.c0000644000000000000000000010231111672453175026415 0ustar rootroot/* $Id: ruby_xml_parser.c,v 1.3 2006/03/27 20:49:19 roscopeco Exp $ */ /* Please see the LICENSE file for copyright and distribution information */ #include "libxml.h" static VALUE libxml_xmlRubyErrorProc = Qnil; static int id_call; int ruby_xml_parser_count = 0; VALUE cXMLParser; VALUE eXMLParserParseError; static int ctxtRead(FILE *f, char * buf, int len) { return(fread(buf, 1, len, f)); } /* * call-seq: * XML::Parser.catalog_dump => true * * Dump the parser resource catalogs to stdout. */ VALUE ruby_xml_parser_catalog_dump(VALUE self) { xmlCatalogDump(stdout); return(Qtrue); } /* * call-seq: * XML::Parser.catalog_remove(catalog) => true * * Remove the specified resource catalog. */ VALUE ruby_xml_parser_catalog_remove(VALUE self, VALUE cat) { Check_Type(cat, T_STRING); xmlCatalogRemove((xmlChar *)StringValuePtr(cat)); return(Qtrue); } /* * call-seq: * XML::Parser.check_lib_versions => true * * Check LIBXML version matches version the bindings * were compiled to. Throws an exception if not. 
*/ VALUE ruby_xml_parser_check_lib_versions(VALUE class) { xmlCheckVersion(LIBXML_VERSION); return(Qtrue); } /* * call-seq: * XML::Parser.enabled_automata? => (true|false) * * Determine whether libxml regexp automata support is enabled. */ VALUE ruby_xml_parser_enabled_automata_q(VALUE class) { #ifdef LIBXML_AUTOMATA_ENABLED return(Qtrue); #else return(Qfalse); #endif } /* * call-seq: * XML::Parser.enabled_c14n? => (true|false) * * Determine whether libxml 'canonical XML' support is enabled. * See "Canonical XML" (http://www.w3.org/TR/xml-c14n) */ VALUE ruby_xml_parser_enabled_c14n_q(VALUE class) { #ifdef LIBXML_C14N_ENABLED return(Qtrue); #else return(Qfalse); #endif } /* * call-seq: * XML::Parser.enabled_catalog? => (true|false) * * Determine whether libxml resource catalog support is enabled. */ VALUE ruby_xml_parser_enabled_catalog_q(VALUE class) { #ifdef LIBXML_CATALOG_ENABLED return(Qtrue); #else return(Qfalse); #endif } /* * call-seq: * XML::Parser.enabled_debug? => (true|false) * * Determine whether libxml debugging support is enabled. */ VALUE ruby_xml_parser_enabled_debug_q(VALUE class) { #ifdef LIBXML_DEBUG_ENABLED return(Qtrue); #else return(Qfalse); #endif } /* * call-seq: * XML::Parser.enabled_docbook? => (true|false) * * Determine whether libxml docbook support is enabled. */ VALUE ruby_xml_parser_enabled_docbook_q(VALUE class) { #ifdef LIBXML_DOCB_ENABLED return(Qtrue); #else return(Qfalse); #endif } /* * call-seq: * XML::Parser.enabled_ftp? => (true|false) * * Determine whether libxml ftp client support is enabled. */ VALUE ruby_xml_parser_enabled_ftp_q(VALUE class) { #ifdef LIBXML_FTP_ENABLED return(Qtrue); #else return(Qfalse); #endif } /* * call-seq: * XML::Parser.enabled_http? => (true|false) * * Determine whether libxml http client support is enabled. */ VALUE ruby_xml_parser_enabled_http_q(VALUE class) { #ifdef LIBXML_HTTP_ENABLED return(Qtrue); #else return(Qfalse); #endif } /* * call-seq: * XML::Parser.enabled_html? => (true|false) * * Determine whether libxml html support is enabled. */ VALUE ruby_xml_parser_enabled_html_q(VALUE class) { #ifdef LIBXML_HTML_ENABLED return(Qtrue); #else return(Qfalse); #endif } /* * call-seq: * XML::Parser.enabled_iconv? => (true|false) * * Determine whether libxml iconv support is enabled. */ VALUE ruby_xml_parser_enabled_iconv_q(VALUE class) { #ifdef LIBXML_ICONV_ENABLED return(Qtrue); #else return(Qfalse); #endif } /* * call-seq: * XML::Parser.enabled_memory_debug? => (true|false) * * Determine whether libxml memory location debugging support * is enabled. */ VALUE ruby_xml_parser_enabled_memory_debug_location_q(VALUE class) { #ifdef DEBUG_MEMORY_LOCATION return(Qtrue); #else return(Qfalse); #endif } /* * call-seq: * XML::Parser.enabled_regexp? => (true|false) * * Determine whether libxml regular expression support is enabled. */ VALUE ruby_xml_parser_enabled_regexp_q(VALUE class) { #ifdef LIBXML_REGEXP_ENABLED return(Qtrue); #else return(Qfalse); #endif } /* * call-seq: * XML::Parser.enabled_schemas? => (true|false) * * Determine whether libxml schema support is enabled. */ VALUE ruby_xml_parser_enabled_schemas_q(VALUE class) { #ifdef LIBXML_SCHEMAS_ENABLED return(Qtrue); #else return(Qfalse); #endif } /* * call-seq: * XML::Parser.enabled_thread? => (true|false) * * Determine whether libxml thread-safe semantics support * is enabled (I think?). */ VALUE ruby_xml_parser_enabled_thread_q(VALUE class) { #ifdef LIBXML_THREAD_ENABLED return(Qtrue); #else return(Qfalse); #endif } /* * call-seq: * XML::Parser.enabled_unicode? 
=> (true|false) * * Determine whether libxml unicode support is enabled. */ VALUE ruby_xml_parser_enabled_unicode_q(VALUE class) { #ifdef LIBXML_UNICODE_ENABLED return(Qtrue); #else return(Qfalse); #endif } /* * call-seq: * XML::Parser.enabled_xinclude? => (true|false) * * Determine whether libxml xinclude support is enabled. */ VALUE ruby_xml_parser_enabled_xinclude_q(VALUE class) { #ifdef LIBXML_XINCLUDE_ENABLED return(Qtrue); #else return(Qfalse); #endif } /* * call-seq: * XML::Parser.enabled_xpath? => (true|false) * * Determine whether libxml xpath support is enabled. */ VALUE ruby_xml_parser_enabled_xpath_q(VALUE class) { #ifdef LIBXML_XPATH_ENABLED return(Qtrue); #else return(Qfalse); #endif } /* * call-seq: * XML::Parser.enabled_xpointer? => (true|false) * * Determine whether libxml xpointer support is enabled. */ VALUE ruby_xml_parser_enabled_xpointer_q(VALUE class) { #ifdef LIBXML_XPTR_ENABLED return(Qtrue); #else return(Qfalse); #endif } /* * call-seq: * XML::Parser.enabled_zlib? => (true|false) * * Determine whether libxml zlib support is enabled. */ VALUE ruby_xml_parser_enabled_zlib_q(VALUE class) { #ifdef HAVE_ZLIB_H return(Qtrue); #else return(Qfalse); #endif } /* * call-seq: * XML::Parser.debug_entities => (true|false) * * Determine whether included-entity debugging is enabled. * (Requires Libxml to be compiled with debugging support) */ VALUE ruby_xml_parser_debug_entities_get(VALUE class) { #ifdef LIBXML_DEBUG_ENABLED if (xmlParserDebugEntities) return(Qtrue); else return(Qfalse); #else rb_warn("libxml was compiled with debugging turned off"); return(Qfalse); #endif } /* * call-seq: * XML::Parser.debug_entities = true|false * * Enable or disable included-entity debugging. * (Requires Libxml to be compiled with debugging support) */ VALUE ruby_xml_parser_debug_entities_set(VALUE class, VALUE bool) { #ifdef LIBXML_DEBUG_ENABLED if (TYPE(bool) == T_FALSE) { xmlParserDebugEntities = 0; return(Qfalse); } else { xmlParserDebugEntities = 1; return(Qtrue); } #else rb_warn("libxml was compiled with debugging turned off"); #endif } /* * call-seq: * XML::Parser.default_keep_blanks => (true|false) * * Determine whether parsers retain whitespace by default. */ VALUE ruby_xml_parser_default_keep_blanks_get(VALUE class) { if (xmlKeepBlanksDefaultValue) return(Qtrue); else return(Qfalse); } /* * call-seq: * XML::Parser.default_keep_blanks = true|false * * Controls whether parsers retain whitespace by default. */ VALUE ruby_xml_parser_default_keep_blanks_set(VALUE class, VALUE bool) { if (TYPE(bool) == T_FALSE) { xmlKeepBlanksDefaultValue = 0; return(Qfalse); } else if (TYPE(bool) == T_TRUE) { xmlKeepBlanksDefaultValue = 1; return(Qtrue); } else { rb_raise(rb_eArgError, "invalid argument, must be a boolean"); } } /* * call-seq: * XML::Parser.default_load_external_dtd => (true|false) * * Determine whether parsers load external DTDs by default. */ VALUE ruby_xml_parser_default_load_external_dtd_get(VALUE class) { if (xmlSubstituteEntitiesDefaultValue) return(Qtrue); else return(Qfalse); } /* * call-seq: * XML::Parser.default_load_external_dtd = true|false * * Controls whether parsers load external DTDs by default. */ VALUE ruby_xml_parser_default_load_external_dtd_set(VALUE class, VALUE bool) { if (TYPE(bool) == T_FALSE) { xmlLoadExtDtdDefaultValue = 0; return(Qfalse); } else { xmlLoadExtDtdDefaultValue = 1; return(Qtrue); } } /* * call-seq: * XML::Parser.default_line_numbers => (true|false) * * Determine whether parsers retain line-numbers by default. 
*/ VALUE ruby_xml_parser_default_line_numbers_get(VALUE class) { if (xmlLineNumbersDefaultValue) return(Qtrue); else return(Qfalse); } /* * call-seq: * XML::Parser.default_line_numbers = true|false * * Controls whether parsers retain line-numbers by default. */ VALUE ruby_xml_parser_default_line_numbers_set(VALUE class, VALUE bool) { if (TYPE(bool) == T_FALSE) { xmlLineNumbersDefault(0); return(Qfalse); } else { xmlLineNumbersDefault(1); return(Qtrue); } } /* * call-seq: * XML::Parser.default_pedantic_parser => (true|false) * * Determine whether parsers are pedantic by default. */ VALUE ruby_xml_parser_default_pedantic_parser_get(VALUE class) { if (xmlPedanticParserDefaultValue) return(Qtrue); else return(Qfalse); } /* * call-seq: * XML::Parser.default_pedantic_parser = true|false * * Controls whether parsers are pedantic by default. */ VALUE ruby_xml_parser_default_pedantic_parser_set(VALUE class, VALUE bool) { if (TYPE(bool) == T_FALSE) { xmlPedanticParserDefault(0); return(Qfalse); } else { xmlPedanticParserDefault(1); return(Qtrue); } } /* * call-seq: * XML::Parser.default_substitute_entities => (true|false) * * Determine whether parsers perform inline entity substitution * (for external entities) by default. */ VALUE ruby_xml_parser_default_substitute_entities_get(VALUE class) { if (xmlSubstituteEntitiesDefaultValue) return(Qtrue); else return(Qfalse); } /* * call-seq: * XML::Parser.default_substitute_entities = true|false * * Controls whether parsers perform inline entity substitution * (for external entities) by default. */ VALUE ruby_xml_parser_default_substitute_entities_set(VALUE class, VALUE bool) { if (TYPE(bool) == T_FALSE) { xmlSubstituteEntitiesDefault(0); return(Qfalse); } else { xmlSubstituteEntitiesDefault(1); return(Qtrue); } } /* * call-seq: * XML::Parser.default_tree_indent_string => "string" * * Obtain the default string used by parsers to indent the XML tree * for output. */ VALUE ruby_xml_parser_default_tree_indent_string_get(VALUE class) { if (xmlTreeIndentString == NULL) return(Qnil); else return(rb_str_new2(xmlTreeIndentString)); } /* * call-seq: * XML::Parser.default_tree_indent_string = "string" * * Set the default string used by parsers to indent the XML tree * for output. */ VALUE ruby_xml_parser_default_tree_indent_string_set(VALUE class, VALUE string) { Check_Type(string, T_STRING); xmlTreeIndentString = ruby_strdup(StringValuePtr(string)); return(string); } /* * call-seq: * XML::Parser.default_validity_checking => (true|false) * * Determine whether parsers perform XML validation by default. */ VALUE ruby_xml_parser_default_validity_checking_get(VALUE class) { if (xmlDoValidityCheckingDefaultValue) return(Qtrue); else return(Qfalse); } /* * call-seq: * XML::Parser.default_validity_checking = true|false * * Controls whether parsers perform XML validation by default. */ VALUE ruby_xml_parser_default_validity_checking_set(VALUE class, VALUE bool) { if (TYPE(bool) == T_FALSE) { xmlDoValidityCheckingDefaultValue = 0; return(Qfalse); } else { xmlDoValidityCheckingDefaultValue = 1; return(Qtrue); } } /* * call-seq: * XML::Parser.default_warnings => (true|false) * * Determine whether parsers output warnings by default. */ VALUE ruby_xml_parser_default_warnings_get(VALUE class) { if (xmlGetWarningsDefaultValue) return(Qtrue); else return(Qfalse); } /* * call-seq: * XML::Parser.default_warnings = true|false * * Controls whether parsers output warnings by default. 
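 * * e.g. (sketch):  XML::Parser.default_warnings = false   # silence libxml warning output for the whole process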
*/ VALUE ruby_xml_parser_default_warnings_set(VALUE class, VALUE bool) { if (TYPE(bool) == T_FALSE) { xmlGetWarningsDefaultValue = 0; return(Qfalse); } else { xmlGetWarningsDefaultValue = 1; return(Qtrue); } } /* * call-seq: * XML::Parser.default_compression => (true|false) * * Determine whether parsers use Zlib compression by default * (requires libxml to be compiled with Zlib support). */ VALUE ruby_xml_parser_default_compression_get(VALUE class) { #ifdef HAVE_ZLIB_H return(INT2FIX(xmlGetCompressMode())); #else rb_warn("libxml was compiled without zlib support"); return(Qfalse); #endif } /* * call-seq: * XML::Parser.default_compression = true|false * * Controls whether parsers use Zlib compression by default * (requires libxml to be compiled with Zlib support). */ VALUE ruby_xml_parser_default_compression_set(VALUE class, VALUE num) { #ifdef HAVE_ZLIB_H Check_Type(num, T_FIXNUM); xmlSetCompressMode(FIX2INT(num)); return(num); #else rb_warn("libxml was compiled without zlib support"); return(Qfalse); #endif } /* * call-seq: * XML::Parser.features => ["feature", ..., "feature"] * * Obtains an array of strings representing features supported * (and enabled) by the installed libxml. */ VALUE ruby_xml_parser_features(VALUE class) { VALUE arr, str; int i, len = MAX_LIBXML_FEATURES_LEN; char **list = NULL; list = ALLOC_N(char *,MAX_LIBXML_FEATURES_LEN); MEMZERO(list, char *, MAX_LIBXML_FEATURES_LEN); arr = rb_ary_new(); if (xmlGetFeaturesList(&len, (const char **)list) == -1) return Qnil; for (i = 0; i < len; i++) { str = rb_str_new2((const char *)list[i]); rb_gc_unregister_address(&str); rb_ary_push(arr, str); } if (len == MAX_LIBXML_FEATURES_LEN) rb_warn("Please contact libxml-devel@rubyforge.org and ask to have the \"MAX_LIBXML_FEATURES_LEN increased\" because you could possibly be seeing an incomplete list"); ruby_xfree(list); return(arr); } /* * call-seq: * parser.filename => "filename" * * Obtain the filename this parser will read from. */ VALUE ruby_xml_parser_filename_get(VALUE self) { ruby_xml_parser *rxp; rx_file_data *data; Data_Get_Struct(self, ruby_xml_parser, rxp); if (rxp->data == NULL) return(Qnil); if (rxp->data_type != RUBY_LIBXML_SRC_TYPE_FILE) return(Qnil); data = (rx_file_data *)rxp->data; return(data->filename); } /* * call-seq: * parser.filename = "filename" * * Set the filename this parser will read from. 
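 * * Typical flow, as a sketch (the path is illustrative): * *   parser = XML::Parser.new * *   parser.filename = 'doc.xml' * *   doc = parser.parse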
*/ VALUE ruby_xml_parser_filename_set(VALUE self, VALUE filename) { ruby_xml_parser *rxp; ruby_xml_parser_context *rxpc; rx_file_data *data; Check_Type(filename, T_STRING); Data_Get_Struct(self, ruby_xml_parser, rxp); if (rxp->data_type == RUBY_LIBXML_SRC_TYPE_NULL) { if (rxp->data != NULL) rb_fatal("crap, this should be null"); rxp->data_type = RUBY_LIBXML_SRC_TYPE_FILE; data = ALLOC(rx_file_data); rxp->data = data; } else if (rxp->data_type != RUBY_LIBXML_SRC_TYPE_FILE) { return(Qnil); } rxp->ctxt = ruby_xml_parser_context_new3(); data = (rx_file_data *)rxp->data; data->filename = filename; Data_Get_Struct(rxp->ctxt, ruby_xml_parser_context, rxpc); rxpc->ctxt = xmlCreateFileParserCtxt(StringValuePtr(filename)); if (rxpc->ctxt == NULL) rb_sys_fail(StringValuePtr(filename)); return(data->filename); } void ruby_xml_parser_free(ruby_xml_parser *rxp) { void *data; ruby_xml_parser_count--; if (ruby_xml_parser_count == 0) xmlCleanupParser(); switch(rxp->data_type) { case RUBY_LIBXML_SRC_TYPE_NULL: break; case RUBY_LIBXML_SRC_TYPE_FILE: data = (void *)(rx_file_data *)rxp->data; free((rx_file_data *)data); break; case RUBY_LIBXML_SRC_TYPE_STRING: data = (void *)(rx_string_data *)rxp->data; free((rx_string_data *)data); break; case RUBY_LIBXML_SRC_TYPE_IO: data = (void *)(rx_io_data *)rxp->data; free((rx_io_data *)data); break; default: rb_fatal("Unknown data type, %d", rxp->data_type); } free(rxp); } /* * call-seq: * XML::Parser.indent_tree_output => (true|false) * * Determines whether XML output will be indented * (using the string supplied to +default_indent_tree_string+) */ VALUE ruby_xml_parser_indent_tree_output_get(VALUE class) { if (xmlIndentTreeOutput) return(Qtrue); else return(Qfalse); } /* * call-seq: * XML::Parser.indent_tree_output = true|false * * Controls whether XML output will be indented * (using the string supplied to +default_indent_tree_string+) */ VALUE ruby_xml_parser_indent_tree_output_set(VALUE class, VALUE bool) { if (TYPE(bool) == T_TRUE) { xmlIndentTreeOutput = 1; return(Qtrue); } else if (TYPE(bool) == T_FALSE) { xmlIndentTreeOutput = 0; return(Qfalse); } else { rb_raise(rb_eArgError, "invalid argument, must be boolean"); } } /* * call-seq: * parser.io => IO * * Obtain the IO instance this parser works with. */ VALUE ruby_xml_parser_io_get(VALUE self, VALUE io) { ruby_xml_parser *rxp; rx_io_data *data; Data_Get_Struct(self, ruby_xml_parser, rxp); if (rxp->data_type == RUBY_LIBXML_SRC_TYPE_NULL || rxp->data_type != RUBY_LIBXML_SRC_TYPE_IO || rxp->data == NULL) return(Qnil); data = (rx_io_data *)rxp->data; return(data->io); } /* * call-seq: * parser.io = IO * * Set the IO instance this parser works with. 
*/ VALUE ruby_xml_parser_io_set(VALUE self, VALUE io) { ruby_xml_parser *rxp; ruby_xml_parser_context *rxpc; rx_io_data *data; OpenFile *fptr; FILE *f; if (!rb_obj_is_kind_of(io, rb_cIO)) rb_raise(rb_eTypeError, "need an IO object"); Data_Get_Struct(self, ruby_xml_parser, rxp); if (rxp->data_type == RUBY_LIBXML_SRC_TYPE_NULL) { if (rxp->data != NULL) rb_fatal("crap, this should be null"); rxp->data_type = RUBY_LIBXML_SRC_TYPE_IO; data = ALLOC(rx_io_data); rxp->data = data; } else if (rxp->data_type != RUBY_LIBXML_SRC_TYPE_IO) { return(Qnil); } rxp->ctxt = ruby_xml_parser_context_new3(); data = (rx_io_data *)rxp->data; data->io = io; GetOpenFile(io, fptr); rb_io_check_readable(fptr); f = GetWriteFile(fptr); Data_Get_Struct(rxp->ctxt, ruby_xml_parser_context, rxpc); rxpc->ctxt = xmlCreateIOParserCtxt(NULL, NULL, (xmlInputReadCallback) ctxtRead, NULL, f, XML_CHAR_ENCODING_NONE); if (NIL_P(rxpc->ctxt)) rb_sys_fail(0); return(data->io); } void ruby_xml_parser_mark(ruby_xml_parser *rxp) { if (rxp == NULL) return; if (!NIL_P(rxp->ctxt)) rb_gc_mark(rxp->ctxt); switch(rxp->data_type) { case RUBY_LIBXML_SRC_TYPE_NULL: break; case RUBY_LIBXML_SRC_TYPE_FILE: if (!NIL_P(((rx_file_data *)rxp->data)->filename)) rb_gc_mark(((rx_file_data *)rxp->data)->filename); break; case RUBY_LIBXML_SRC_TYPE_STRING: if (!NIL_P(((rx_string_data *)rxp->data)->str)) rb_gc_mark(((rx_string_data *)rxp->data)->str); break; case RUBY_LIBXML_SRC_TYPE_IO: if (!NIL_P(((rx_io_data *)rxp->data)->io)) rb_gc_mark(((rx_io_data *)rxp->data)->io); break; default: rb_fatal("unknown datatype: %d", rxp->data_type); } } /* * call-seq: * XML::Parser.memory_dump => (true|false) * * Perform a parser memory dump (requires memory debugging * support in libxml). */ VALUE ruby_xml_parser_memory_dump(VALUE self) { #ifdef DEBUG_MEMORY_LOCATION xmlMemoryDump(); return(Qtrue); #else rb_warn("libxml was compiled without memory debugging support"); return(Qfalse); #endif } /* * call-seq: * XML::Parser.memory_used => num_bytes * * Perform a parser memory dump (requires memory debugging * support in libxml). */ VALUE ruby_xml_parser_memory_used(VALUE self) { #ifdef DEBUG_MEMORY_LOCATION return(INT2NUM(xmlMemUsed())); #else rb_warn("libxml was compiled without memory debugging support"); return(Qfalse); #endif } /* * call-seq: * XML::Parser.new => parser * * Create a new parser instance with no pre-determined source. */ VALUE ruby_xml_parser_new(VALUE class) { ruby_xml_parser *rxp; ruby_xml_parser_count++; rxp = ALLOC(ruby_xml_parser); rxp->ctxt = Qnil; rxp->data_type = RUBY_LIBXML_SRC_TYPE_NULL; rxp->data = NULL; rxp->parsed = 0; return(Data_Wrap_Struct(class, ruby_xml_parser_mark, ruby_xml_parser_free, rxp)); } /* * call-seq: * XML::Parser.file => parser * * Create a new parser instance that will read the specified file. */ VALUE ruby_xml_parser_new_file(VALUE class, VALUE filename) { VALUE obj; ruby_xml_parser *rxp; rx_file_data *data; obj = ruby_xml_parser_new(class); Data_Get_Struct(obj, ruby_xml_parser, rxp); data = ALLOC(rx_file_data); rxp->data_type = RUBY_LIBXML_SRC_TYPE_FILE; rxp->data = data; ruby_xml_parser_filename_set(obj, filename); return(obj); } /* * call-seq: * XML::Parser.io => parser * * Create a new parser instance that will read from the * specified IO object. 
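 * * e.g. (sketch; assumes the IO is passed as the single argument, as the C signature below suggests): * *   XML::Parser.io(File.open('doc.xml')).parse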
*/ VALUE ruby_xml_parser_new_io(VALUE class, VALUE io) { VALUE obj; ruby_xml_parser *rxp; rx_io_data *data; obj = ruby_xml_parser_new(class); Data_Get_Struct(obj, ruby_xml_parser, rxp); data = ALLOC(rx_io_data); rxp->data_type = RUBY_LIBXML_SRC_TYPE_IO; rxp->data = data; ruby_xml_parser_io_set(obj, io); return(obj); } /* * call-seq: * XML::Parser.string => parser * * Create a new parser instance that will parse the given * string. */ VALUE ruby_xml_parser_new_string(VALUE class, VALUE str) { VALUE obj; ruby_xml_parser *rxp; rx_string_data *data; obj = ruby_xml_parser_new(class); Data_Get_Struct(obj, ruby_xml_parser, rxp); data = ALLOC(rx_string_data); rxp->data_type = RUBY_LIBXML_SRC_TYPE_STRING; rxp->data = data; ruby_xml_parser_str_set(obj, str); return(obj); } /* * call-seq: * parser.parse => document * * Parse the input XML and create an XML::Document with * it's content. If an error occurs, XML::Parser::ParseError * is thrown. */ VALUE ruby_xml_parser_parse(VALUE self) { ruby_xml_document *rxd; ruby_xml_parser *rxp; ruby_xml_parser_context *rxpc; xmlDocPtr xdp; VALUE doc; Data_Get_Struct(self, ruby_xml_parser, rxp); switch (rxp->data_type) { case RUBY_LIBXML_SRC_TYPE_NULL: return(Qnil); case RUBY_LIBXML_SRC_TYPE_STRING: case RUBY_LIBXML_SRC_TYPE_FILE: case RUBY_LIBXML_SRC_TYPE_IO: Data_Get_Struct(rxp->ctxt, ruby_xml_parser_context, rxpc); if (xmlParseDocument(rxpc->ctxt) == -1) { xmlFreeDoc(rxpc->ctxt->myDoc); rb_raise(eXMLParserParseError, "Document didn't parse"); } xdp = rxpc->ctxt->myDoc; if (!rxpc->ctxt->wellFormed) { xmlFreeDoc(xdp); xdp = NULL; rb_raise(eXMLParserParseError, "Document did not contain well-formed XML"); } else { rxp->parsed = 1; } doc = ruby_xml_document_new(cXMLDocument, xdp); Data_Get_Struct(doc, ruby_xml_document, rxd); rxd->is_ptr = 0; rxd->doc = xdp; break; default: rb_fatal("Unknown data type, %d", rxp->data_type); } return(doc); } /* * call-seq: * parser.context => context * * Obtain the XML::Parser::Context associated with this * parser. */ VALUE ruby_xml_parser_parser_context_get(VALUE self) { ruby_xml_parser *rxp; Data_Get_Struct(self, ruby_xml_parser, rxp); if (rxp->ctxt == Qnil) return(Qnil); else return(rxp->ctxt); } /* * call-seq: * parser.string => "string" * * Obtain the string this parser works with. */ VALUE ruby_xml_parser_str_get(VALUE self) { ruby_xml_parser *rxp; rx_string_data *data; Data_Get_Struct(self, ruby_xml_parser, rxp); if (rxp->data == NULL || rxp->data_type != RUBY_LIBXML_SRC_TYPE_STRING) return(Qnil); data = (rx_string_data *)rxp->data; return(data->str); } /* * call-seq: * parser.string = "string" * * Set the string this parser works with. */ VALUE ruby_xml_parser_str_set(VALUE self, VALUE str) { ruby_xml_parser *rxp; ruby_xml_parser_context *rxpc; rx_string_data *data; Check_Type(str, T_STRING); Data_Get_Struct(self, ruby_xml_parser, rxp); if (rxp->data_type == RUBY_LIBXML_SRC_TYPE_NULL) { rxp->data_type = RUBY_LIBXML_SRC_TYPE_STRING; data = ALLOC(rx_string_data); rxp->data = data; } else if (rxp->data_type != RUBY_LIBXML_SRC_TYPE_STRING) { return(Qnil); } rxp->ctxt = ruby_xml_parser_context_new3(); data = (rx_string_data *)rxp->data; data->str = str; Data_Get_Struct(rxp->ctxt, ruby_xml_parser_context, rxpc); rxpc->ctxt = xmlCreateMemoryParserCtxt(StringValuePtr(data->str), RSTRING(data->str)->len); return(data->str); } /* * call-seq: * XML::Parser.register_error_handler(lambda { |msg| ... 
}) => old_handler * XML::Parser.register_error_handler(nil) => old_handler * * Register the attached block as the handler for parser errors. * A message describing parse errors is passed to the block. * Libxml passes error messages to the handler in parts, one per call. * A typical error results in six calls to this proc, with arguments: * * "Entity: line 1: ", * "parser ", * "error : ", * "Opening and ending tag mismatch: foo line 1 and foz\n", * "\n", * " ^\n" * * Note that the error handler is shared by all threads. */ VALUE ruby_xml_parser_registerErrorHandler(VALUE self, VALUE proc) { VALUE old_block = libxml_xmlRubyErrorProc; libxml_xmlRubyErrorProc = proc; return(old_block); } static void libxml_xmlErrorFuncHandler(ATTRIBUTE_UNUSED void *ctx, const char *msg, ...) { va_list ap; char str[1000]; VALUE rstr; if (libxml_xmlRubyErrorProc == Qnil) { va_start(ap, msg); vfprintf(stderr, msg, ap); va_end(ap); } else { va_start(ap, msg); if (vsnprintf(str, 999, msg, ap) >= 998) str[999] = 0; va_end(ap); rstr = rb_str_new2(str); rb_funcall2(libxml_xmlRubyErrorProc, id_call, 1, &rstr); } } /* #define RUBY_XML_PARSER_ENABLED_INIT(func, method) \ rb_define_singleton_method(cXMLParser, method, \ ruby_xml_parser_enabled_##func##_q, 0); */ ///#include "cbg.c" /// ///VALUE ruby_register_deb(VALUE self) { /// deb_register_cbg(); /// return(Qtrue); ///} // Rdoc needs to know #ifdef RDOC_NEVER_DEFINED mXML = rb_define_module("XML"); #endif void ruby_init_parser(void) { cXMLParser = rb_define_class_under(mXML, "Parser", rb_cObject); eXMLParserParseError = rb_define_class_under(cXMLParser, "ParseError", rb_eRuntimeError); /* Constants */ rb_define_const(cXMLParser, "LIBXML_VERSION", rb_str_new2(LIBXML_DOTTED_VERSION)); rb_define_const(cXMLParser, "VERSION", rb_str_new2(RUBY_LIBXML_VERSION)); rb_define_const(cXMLParser, "VERNUM", INT2NUM(RUBY_LIBXML_VERNUM)); /* Question-esqe Class Methods */ /* RDoc won't have them defined by a macro... 
*/ rb_define_singleton_method(cXMLParser, "enabled_automata?", ruby_xml_parser_enabled_automata_q, 0); rb_define_singleton_method(cXMLParser, "enabled_c14n?", ruby_xml_parser_enabled_c14n_q, 0); rb_define_singleton_method(cXMLParser, "enabled_catalog?", ruby_xml_parser_enabled_catalog_q, 0); rb_define_singleton_method(cXMLParser, "enabled_debug?", ruby_xml_parser_enabled_debug_q, 0); rb_define_singleton_method(cXMLParser, "enabled_docbook?", ruby_xml_parser_enabled_docbook_q, 0); rb_define_singleton_method(cXMLParser, "enabled_ftp?", ruby_xml_parser_enabled_ftp_q, 0); rb_define_singleton_method(cXMLParser, "enabled_http?", ruby_xml_parser_enabled_http_q, 0); rb_define_singleton_method(cXMLParser, "enabled_html?", ruby_xml_parser_enabled_html_q, 0); rb_define_singleton_method(cXMLParser, "enabled_iconv?", ruby_xml_parser_enabled_iconv_q, 0); rb_define_singleton_method(cXMLParser, "enabled_memory_debug?", ruby_xml_parser_enabled_memory_debug_location_q, 0); rb_define_singleton_method(cXMLParser, "enabled_regexp?", ruby_xml_parser_enabled_regexp_q, 0); rb_define_singleton_method(cXMLParser, "enabled_schemas?", ruby_xml_parser_enabled_schemas_q, 0); rb_define_singleton_method(cXMLParser, "enabled_thread?", ruby_xml_parser_enabled_thread_q, 0); rb_define_singleton_method(cXMLParser, "enabled_unicode?", ruby_xml_parser_enabled_unicode_q, 0); rb_define_singleton_method(cXMLParser, "enabled_xinclude?", ruby_xml_parser_enabled_xinclude_q, 0); rb_define_singleton_method(cXMLParser, "enabled_xpath?", ruby_xml_parser_enabled_xpath_q, 0); rb_define_singleton_method(cXMLParser, "enabled_xpointer?", ruby_xml_parser_enabled_xpointer_q, 0); rb_define_singleton_method(cXMLParser, "enabled_zlib?", ruby_xml_parser_enabled_zlib_q, 0); /* Other Class Methods */ /// rb_define_singleton_method(cXMLParser, "register_deb", /// ruby_register_deb, 0); // TODO Maybe a set of 'xxxx_catalog' aliases might be more Ruby? rb_define_singleton_method(cXMLParser, "catalog_dump", ruby_xml_parser_catalog_dump, 0); rb_define_singleton_method(cXMLParser, "catalog_remove", ruby_xml_parser_catalog_remove, 1); rb_define_singleton_method(cXMLParser, "check_lib_versions", ruby_xml_parser_check_lib_versions, 0); // TODO should this be debug_entities_q / debug_entities_set? // should all these default attribute pairs work that way? 
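  /* Illustrative Ruby-side use of the feature-query class methods registered
   * above; the results depend entirely on how the local libxml was compiled:
   *
   *   XML::Parser.enabled_http?  #=> true or false
   *   XML::Parser.enabled_zlib?  #=> true or false
   */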
rb_define_singleton_method(cXMLParser, "debug_entities", ruby_xml_parser_debug_entities_get, 0); rb_define_singleton_method(cXMLParser, "debug_entities=", ruby_xml_parser_debug_entities_set, 1); rb_define_singleton_method(cXMLParser, "default_compression", ruby_xml_parser_default_compression_get, 0); rb_define_singleton_method(cXMLParser, "default_compression=", ruby_xml_parser_default_compression_set, 1); rb_define_singleton_method(cXMLParser, "default_keep_blanks", ruby_xml_parser_default_keep_blanks_get, 0); rb_define_singleton_method(cXMLParser, "default_keep_blanks=", ruby_xml_parser_default_keep_blanks_set, 1); rb_define_singleton_method(cXMLParser, "default_load_external_dtd", ruby_xml_parser_default_load_external_dtd_set, 0); rb_define_singleton_method(cXMLParser, "default_load_external_dtd=", ruby_xml_parser_default_load_external_dtd_get, 1); rb_define_singleton_method(cXMLParser, "default_line_numbers", ruby_xml_parser_default_line_numbers_get, 0); rb_define_singleton_method(cXMLParser, "default_line_numbers=", ruby_xml_parser_default_line_numbers_set, 1); rb_define_singleton_method(cXMLParser, "default_pedantic_parser", ruby_xml_parser_default_pedantic_parser_get, 0); rb_define_singleton_method(cXMLParser, "default_pedantic_parser=", ruby_xml_parser_default_pedantic_parser_set, 1); rb_define_singleton_method(cXMLParser, "default_substitute_entities", ruby_xml_parser_default_substitute_entities_get, 0); rb_define_singleton_method(cXMLParser, "default_substitute_entities=", ruby_xml_parser_default_substitute_entities_set, 1); rb_define_singleton_method(cXMLParser, "default_tree_indent_string", ruby_xml_parser_default_tree_indent_string_get, 0); rb_define_singleton_method(cXMLParser, "default_tree_indent_string=", ruby_xml_parser_default_tree_indent_string_set, 1); rb_define_singleton_method(cXMLParser, "default_validity_checking", ruby_xml_parser_default_validity_checking_get, 0); rb_define_singleton_method(cXMLParser, "default_validity_checking=", ruby_xml_parser_default_validity_checking_set, 1); rb_define_singleton_method(cXMLParser, "default_warnings", ruby_xml_parser_default_warnings_get, 0); rb_define_singleton_method(cXMLParser, "default_warnings=", ruby_xml_parser_default_warnings_set, 1); rb_define_singleton_method(cXMLParser, "features", ruby_xml_parser_features, 0); rb_define_singleton_method(cXMLParser, "file", ruby_xml_parser_new_file, 1); rb_define_singleton_method(cXMLParser, "indent_tree_output", ruby_xml_parser_indent_tree_output_get, 0); rb_define_singleton_method(cXMLParser, "indent_tree_output=", ruby_xml_parser_indent_tree_output_set, 1); rb_define_singleton_method(cXMLParser, "io", ruby_xml_parser_new_io, 1); rb_define_singleton_method(cXMLParser, "memory_dump", ruby_xml_parser_memory_dump, 0); rb_define_singleton_method(cXMLParser, "memory_used", ruby_xml_parser_memory_used, 0); rb_define_singleton_method(cXMLParser, "new", ruby_xml_parser_new, 0); rb_define_singleton_method(cXMLParser, "string", ruby_xml_parser_new_string, 1); rb_define_singleton_method(cXMLParser, "register_error_handler", ruby_xml_parser_registerErrorHandler, 1); rb_define_method(cXMLParser, "filename", ruby_xml_parser_filename_get, 0); rb_define_method(cXMLParser, "filename=", ruby_xml_parser_filename_set, 1); rb_define_method(cXMLParser, "io", ruby_xml_parser_io_get, 0); rb_define_method(cXMLParser, "io=", ruby_xml_parser_io_set, 1); rb_define_method(cXMLParser, "parse", ruby_xml_parser_parse, 0); rb_define_method(cXMLParser, "parser_context", ruby_xml_parser_parser_context_get, 0); 
rb_define_method(cXMLParser, "string", ruby_xml_parser_str_get, 0); rb_define_method(cXMLParser, "string=", ruby_xml_parser_str_set, 1); // set up error handling xmlSetGenericErrorFunc(NULL, libxml_xmlErrorFuncHandler); xmlThrDefSetGenericErrorFunc(NULL, libxml_xmlErrorFuncHandler); id_call = rb_intern("call"); } ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_node_set.h0000644000000000000000000000127711672453175026737 0ustar rootroot/* $Id: ruby_xml_node_set.h,v 1.1 2006/02/21 20:40:16 roscopeco Exp $ */ /* Please see the LICENSE file for copyright and distribution information */ #ifndef __RUBY_XML_NODE_SET__ #define __RUBY_XML_NODE_SET__ extern VALUE cXMLNodeSet; typedef struct ruby_xml_node_set { xmlNodeSetPtr node_set; VALUE xd; VALUE xpath; int data_type; void *data; } ruby_xml_node_set; void ruby_xml_node_set_free(ruby_xml_node_set *rxnset); void ruby_init_xml_node_set(void); VALUE ruby_xml_node_set_new(VALUE class, VALUE xd, VALUE xpath, xmlNodeSetPtr node_set); VALUE ruby_xml_node_set_new2(VALUE xd, VALUE xpath, xmlNodeSetPtr node_set); VALUE ruby_xml_node_set_each(VALUE self); #endif ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/libxml.rb0000644000000000000000000000352411672453175024656 0ustar rootroot# $Id: libxml.rb,v 1.1 2006/04/17 13:30:22 roscopeco Exp $ # Please see the LICENSE file for copyright and distribution information require 'xml/libxml_so' class XML::Node::Set def empty? #:nodoc: self.length <= 0 end def first #:nodoc: self.each { |n| return n } end end class XML::Document include Enumerable # maybe, maybe not... def each(&blk) #:nodoc: find('//*').each(&blk) end end class XML::Node::Set # inefficient, but maybe can find a way to cache the # ary and dump on change? def [](i, count = nil) #:nodoc: if count to_a[i,count] else to_a[i] end end def to_s #:nodoc: to_a.to_s end end module XML::SiblingEnum #:nodoc:all private # Iterates nodes and attributes def siblings(node, &blk) if n = node loop do blk.call(n) break unless n = n.next end end end end class XML::Node include XML::SiblingEnum include Enumerable include Comparable # maybe these don't belong on all nodes... def each_child(&blk) #:nodoc: siblings(child, &blk) end def each_attr(&blk) #:nodoc: siblings(properties, &blk) end # all siblings INCLUDING self def each_sibling(&blk) #:nodoc: siblings(self, &blk) end # I guess this is what you'd expect? 
alias :each :each_child def to_a #:nodoc: inject([]) { |ary,n| ary << n } end def <=>(other) #:nodoc: to_s <=> other.to_s end end class XML::Attr include XML::SiblingEnum include Enumerable def each_sibling(&blk) #:nodoc: siblings(self,&blk) end alias :each_attr :each_sibling alias :each :each_sibling def to_h #:nodoc: inject({}) do |h,a| h[a.name] = a.value end end def to_a #:nodoc: inject([]) do |ary,a| ary << [a.name, a.value] end end def to_s #:nodoc: "#{name} = #{value}" end end ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_parser_context.c0000644000000000000000000004162111672453175030167 0ustar rootroot/* $Id: ruby_xml_parser_context.c,v 1.1 2006/02/21 20:40:16 roscopeco Exp $ */ /* Please see the LICENSE file for copyright and distribution information */ #include "libxml.h" #include "ruby_xml_parser_context.h" /* TODO: * * *) xmlParserInput class/structure * *) errNo and mappings * *) validity context * *) record_info or stats class/structure * *) xmlParserNodeInfoSeq * *) xmlParserInputState */ VALUE cXMLParserContext; /* * call-seq: * context.data_directory => "dir" * * Obtain the data directory associated with this context. */ VALUE ruby_xml_parser_context_data_directory_get(VALUE self) { ruby_xml_parser_context *rxpc; Data_Get_Struct(self, ruby_xml_parser_context, rxpc); if (rxpc->ctxt->directory == NULL) return(Qnil); else return(rb_str_new2(rxpc->ctxt->directory)); } /* * call-seq: * context.depth => num * * Obtain the depth of this context. */ VALUE ruby_xml_parser_context_depth_get(VALUE self) { ruby_xml_parser_context *rxpc; Data_Get_Struct(self, ruby_xml_parser_context, rxpc); return(INT2NUM(rxpc->ctxt->depth)); } /* * call-seq: * context.disable_sax? => (true|false) * * Determine whether SAX-based processing is disabled * in this context. */ VALUE ruby_xml_parser_context_disable_sax_q(VALUE self) { ruby_xml_parser_context *rxpc; Data_Get_Struct(self, ruby_xml_parser_context, rxpc); if (rxpc->ctxt->disableSAX) return(Qtrue); else return(Qfalse); } /* * call-seq: * context.doc => document * * Obtain the +XML::Document+ associated with this context. */ VALUE ruby_xml_parser_context_doc_get(VALUE self) { ruby_xml_parser_context *rxpc; Data_Get_Struct(self, ruby_xml_parser_context, rxpc); if (rxpc->ctxt->myDoc == NULL) return(Qnil); return(ruby_xml_document_new4(cXMLDocument, rxpc->ctxt->myDoc)); } /* * call-seq: * context.docbook? => (true|false) * * Determine whether this is a docbook context. */ VALUE ruby_xml_parser_context_docbook_q(VALUE self) { ruby_xml_parser_context *rxpc; Data_Get_Struct(self, ruby_xml_parser_context, rxpc); if (rxpc->ctxt->html == 2) // TODO check this return(Qtrue); else return(Qfalse); } /* * call-seq: * context.encoding => "encoding" * * Obtain the character encoding identifier used in * this context. */ VALUE ruby_xml_parser_context_encoding_get(VALUE self) { ruby_xml_parser_context *rxpc; Data_Get_Struct(self, ruby_xml_parser_context, rxpc); if (rxpc->ctxt->encoding == NULL) return(Qnil); else return(rb_str_new2((const char*)rxpc->ctxt->encoding)); } /* * call-seq: * context.errno => num * * Obtain the last-error number in this context. 
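 *
 * Illustrative example (the value is whatever libxml last recorded):
 *
 *   context.errno  #=> 0 when no error has occurred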
*/ VALUE ruby_xml_parser_context_errno_get(VALUE self) { ruby_xml_parser_context *rxpc; Data_Get_Struct(self, ruby_xml_parser_context, rxpc); return(INT2NUM(rxpc->ctxt->errNo)); } void ruby_xml_parser_context_free(ruby_xml_parser_context *rxpc) { if (rxpc->ctxt != NULL && !rxpc->is_ptr) { xmlFreeParserCtxt(rxpc->ctxt); ruby_xml_parser_count--; rxpc->ctxt = NULL; } if (ruby_xml_parser_count == 0) xmlCleanupParser(); free(rxpc); } /* * call-seq: * context.html? => (true|false) * * Determine whether this is an html context. */ VALUE ruby_xml_parser_context_html_q(VALUE self) { ruby_xml_parser_context *rxpc; Data_Get_Struct(self, ruby_xml_parser_context, rxpc); if (rxpc->ctxt->html == 1) return(Qtrue); else return(Qfalse); } /* * call-seq: * context.max_num_streams => num * * Obtain the limit on the number of IO streams opened in * this context. */ VALUE ruby_xml_parser_context_io_max_num_streams_get(VALUE self) { // TODO alias to max_streams and dep this? ruby_xml_parser_context *rxpc; Data_Get_Struct(self, ruby_xml_parser_context, rxpc); return(INT2NUM(rxpc->ctxt->inputMax)); } /* * call-seq: * context.num_streams => "dir" * * Obtain the actual number of IO streams in this * context. */ VALUE ruby_xml_parser_context_io_num_streams_get(VALUE self) { ruby_xml_parser_context *rxpc; Data_Get_Struct(self, ruby_xml_parser_context, rxpc); return(INT2NUM(rxpc->ctxt->inputNr)); } /* * call-seq: * context.keep_blanks? => (true|false) * * Determine whether parsers in this context retain * whitespace. */ VALUE ruby_xml_parser_context_keep_blanks_q(VALUE self) { ruby_xml_parser_context *rxpc; Data_Get_Struct(self, ruby_xml_parser_context, rxpc); if (rxpc->ctxt->keepBlanks) return(Qtrue); else return(Qfalse); } /* * call-seq: * context.name_depth => num * * Obtain the name depth for this context. */ VALUE ruby_xml_parser_context_name_depth_get(VALUE self) { ruby_xml_parser_context *rxpc; Data_Get_Struct(self, ruby_xml_parser_context, rxpc); return(INT2NUM(rxpc->ctxt->nameNr)); } /* * call-seq: * context.name_depth_max => num * * Obtain the maximum name depth for this context. */ VALUE ruby_xml_parser_context_name_depth_max_get(VALUE self) { ruby_xml_parser_context *rxpc; Data_Get_Struct(self, ruby_xml_parser_context, rxpc); return(INT2NUM(rxpc->ctxt->nameMax)); } /* * call-seq: * context.name_node => "name" * * Obtain the name node for this context. */ VALUE ruby_xml_parser_context_name_node_get(VALUE self) { ruby_xml_parser_context *rxpc; Data_Get_Struct(self, ruby_xml_parser_context, rxpc); if (rxpc->ctxt->name == NULL) return(Qnil); else return(rb_str_new2((const char*)rxpc->ctxt->name)); } /* * call-seq: * context.name_tab => ["name", ..., "name"] * * Obtain the name table for this context. */ VALUE ruby_xml_parser_context_name_tab_get(VALUE self) { int i; ruby_xml_parser_context *rxpc; VALUE tab_ary; Data_Get_Struct(self, ruby_xml_parser_context, rxpc); if (rxpc->ctxt->nameTab == NULL) return(Qnil); tab_ary = rb_ary_new(); for (i = (rxpc->ctxt->nameNr - 1); i >= 0; i--) { if (rxpc->ctxt->nameTab[i] == NULL) continue; else rb_ary_push(tab_ary, rb_str_new2((const char*)rxpc->ctxt->nameTab[i])); } return(tab_ary); } /* * call-seq: * context.node_depth => num * * Obtain the node depth for this context. */ VALUE ruby_xml_parser_context_node_depth_get(VALUE self) { ruby_xml_parser_context *rxpc; Data_Get_Struct(self, ruby_xml_parser_context, rxpc); return(INT2NUM(rxpc->ctxt->nodeNr)); } /* * call-seq: * context.node => node * * Obtain the root node of this context. 
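 *
 * Illustrative example (assumes a document has already been parsed):
 *
 *   context.node  #=> the root XML::Node, or nil if none is set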
*/ VALUE ruby_xml_parser_context_node_get(VALUE self) { ruby_xml_parser_context *rxpc; Data_Get_Struct(self, ruby_xml_parser_context, rxpc); if (rxpc->ctxt->node == NULL) return(Qnil); else return(ruby_xml_node_new2(cXMLNode, ruby_xml_document_new(cXMLDocument, rxpc->ctxt->myDoc), rxpc->ctxt->node)); } /* * call-seq: * context.node_depth_max => num * * Obtain the maximum node depth for this context. */ VALUE ruby_xml_parser_context_node_depth_max_get(VALUE self) { ruby_xml_parser_context *rxpc; Data_Get_Struct(self, ruby_xml_parser_context, rxpc); return(INT2NUM(rxpc->ctxt->nodeMax)); } /* * call-seq: * context.num_chars => num * * Obtain the number of characters in this context. */ VALUE ruby_xml_parser_context_num_chars_get(VALUE self) { ruby_xml_parser_context *rxpc; Data_Get_Struct(self, ruby_xml_parser_context, rxpc); return(LONG2NUM(rxpc->ctxt->nbChars)); } VALUE ruby_xml_parser_context_new(VALUE class, xmlParserCtxtPtr ctxt) { ruby_xml_parser_context *rxpc; rxpc = ALLOC(ruby_xml_parser_context); ruby_xml_parser_count++; rxpc->ctxt = ctxt; rxpc->is_ptr = 0; return(Data_Wrap_Struct(class, 0, ruby_xml_parser_context_free, rxpc)); } VALUE ruby_xml_parser_context_new2(VALUE class) { return(ruby_xml_parser_context_new(class, NULL)); } VALUE ruby_xml_parser_context_new3() { return(ruby_xml_parser_context_new2(cXMLParserContext)); } /* * call-seq: * context.replace_entities? => (true|false) * * Determine whether external entity replacement is enabled in this * context. */ VALUE ruby_xml_parser_context_replace_entities_q(VALUE self) { ruby_xml_parser_context *rxpc; Data_Get_Struct(self, ruby_xml_parser_context, rxpc); if (rxpc->ctxt->replaceEntities) return(Qtrue); else return(Qfalse); } /* * call-seq: * context.replace_entities = true|false * * Control whether external entity replacement is enabled in this * context. */ VALUE ruby_xml_parser_context_replace_entities_set(VALUE self, VALUE bool) { ruby_xml_parser_context *rxpc; Data_Get_Struct(self, ruby_xml_parser_context, rxpc); if (TYPE(bool) == T_FALSE) { rxpc->ctxt->replaceEntities = 0; return(Qfalse); } else { rxpc->ctxt->replaceEntities = 1; return(Qfalse); } } /* * call-seq: * context.space_depth => num * * Obtain the space depth for this context. */ VALUE ruby_xml_parser_context_space_depth_get(VALUE self) { ruby_xml_parser_context *rxpc; Data_Get_Struct(self, ruby_xml_parser_context, rxpc); return(INT2NUM(rxpc->ctxt->spaceNr)); } /* * call-seq: * context.space_depth => num * * Obtain the maximum space depth for this context. */ VALUE ruby_xml_parser_context_space_depth_max_get(VALUE self) { ruby_xml_parser_context *rxpc; Data_Get_Struct(self, ruby_xml_parser_context, rxpc); return(INT2NUM(rxpc->ctxt->spaceMax)); } /* * call-seq: * context.subset_external? => (true|false) * * Determine whether this context is a subset of an * external context. */ VALUE ruby_xml_parser_context_subset_external_q(VALUE self) { ruby_xml_parser_context *rxpc; Data_Get_Struct(self, ruby_xml_parser_context, rxpc); if (rxpc->ctxt->inSubset == 2) return(Qtrue); else return(Qfalse); } /* * call-seq: * context.subset_internal? => (true|false) * * Determine whether this context is a subset of an * internal context. */ VALUE ruby_xml_parser_context_subset_internal_q(VALUE self) { ruby_xml_parser_context *rxpc; Data_Get_Struct(self, ruby_xml_parser_context, rxpc); if (rxpc->ctxt->inSubset == 1) return(Qtrue); else return(Qfalse); } /* * call-seq: * context.subset_name => "name" * * Obtain this context's subset name (valid only if * either of subset_external? 
or subset_internal? * is true). */ VALUE ruby_xml_parser_context_subset_name_get(VALUE self) { ruby_xml_parser_context *rxpc; Data_Get_Struct(self, ruby_xml_parser_context, rxpc); if (rxpc->ctxt->intSubName == NULL) return(Qnil); else return(rb_str_new2((const char*)rxpc->ctxt->intSubName)); } /* * call-seq: * context.subset_external_uri => "uri" * * Obtain this context's external subset URI. (valid only if * either of subset_external? or subset_internal? * is true). */ VALUE ruby_xml_parser_context_subset_external_uri_get(VALUE self) { ruby_xml_parser_context *rxpc; Data_Get_Struct(self, ruby_xml_parser_context, rxpc); if (rxpc->ctxt->extSubURI == NULL) return(Qnil); else return(rb_str_new2((const char*)rxpc->ctxt->extSubURI)); } /* * call-seq: * context.subset_external_system_id => "system_id" * * Obtain this context's external subset system identifier. * (valid only if either of subset_external? or subset_internal? * is true). */ VALUE ruby_xml_parser_context_subset_external_system_id_get(VALUE self) { ruby_xml_parser_context *rxpc; Data_Get_Struct(self, ruby_xml_parser_context, rxpc); if (rxpc->ctxt->extSubSystem == NULL) return(Qnil); else return(rb_str_new2((const char*)rxpc->ctxt->extSubSystem)); } /* * call-seq: * context.standalone? => (true|false) * * Determine whether this is a standalone context. */ VALUE ruby_xml_parser_context_standalone_q(VALUE self) { ruby_xml_parser_context *rxpc; Data_Get_Struct(self, ruby_xml_parser_context, rxpc); if (rxpc->ctxt->standalone) return(Qtrue); else return(Qfalse); } /* * call-seq: * context.stats? => (true|false) * * Determine whether this context maintains statistics. */ VALUE ruby_xml_parser_context_stats_q(VALUE self) { ruby_xml_parser_context *rxpc; Data_Get_Struct(self, ruby_xml_parser_context, rxpc); if (rxpc->ctxt->record_info) return(Qtrue); else return(Qfalse); } /* * call-seq: * context.valid? => (true|false) * * Determine whether this context is valid. */ VALUE ruby_xml_parser_context_valid_q(VALUE self) { ruby_xml_parser_context *rxpc; Data_Get_Struct(self, ruby_xml_parser_context, rxpc); if (rxpc->ctxt->valid) return(Qtrue); else return(Qfalse); } /* * call-seq: * context.validate? => (true|false) * * Determine whether validation is enabled in this context. */ VALUE ruby_xml_parser_context_validate_q(VALUE self) { ruby_xml_parser_context *rxpc; Data_Get_Struct(self, ruby_xml_parser_context, rxpc); if (rxpc->ctxt->validate) return(Qtrue); else return(Qfalse); } /* * call-seq: * context.version => "version" * * Obtain this context's version identifier. */ VALUE ruby_xml_parser_context_version_get(VALUE self) { ruby_xml_parser_context *rxpc; Data_Get_Struct(self, ruby_xml_parser_context, rxpc); if (rxpc->ctxt->version == NULL) return(Qnil); else return(rb_str_new2((const char*)rxpc->ctxt->version)); } /* * call-seq: * context.well_formed? => (true|false) * * Determine whether this context contains well-formed XML. 
*/ VALUE ruby_xml_parser_context_well_formed_q(VALUE self) { ruby_xml_parser_context *rxpc; Data_Get_Struct(self, ruby_xml_parser_context, rxpc); if (rxpc->ctxt->wellFormed) return(Qtrue); else return(Qfalse); } // Rdoc needs to know #ifdef RDOC_NEVER_DEFINED mXML = rb_define_module("XML"); cXMLParser = rb_define_class_under(mXML, "Parser", rb_cObject); #endif void ruby_init_xml_parser_context(void) { cXMLParserContext = rb_define_class_under(cXMLParser, "Context", rb_cObject); rb_define_method(cXMLParserContext, "data_directory", ruby_xml_parser_context_data_directory_get, 0); rb_define_method(cXMLParserContext, "depth", ruby_xml_parser_context_depth_get, 0); rb_define_method(cXMLParserContext, "disable_sax?", ruby_xml_parser_context_disable_sax_q, 0); rb_define_method(cXMLParserContext, "doc", ruby_xml_parser_context_doc_get, 0); rb_define_method(cXMLParserContext, "docbook?", ruby_xml_parser_context_docbook_q, 0); rb_define_method(cXMLParserContext, "encoding", ruby_xml_parser_context_encoding_get, 0); rb_define_method(cXMLParserContext, "errno", ruby_xml_parser_context_errno_get, 0); rb_define_method(cXMLParserContext, "html?", ruby_xml_parser_context_html_q, 0); rb_define_method(cXMLParserContext, "io_max_num_streams", ruby_xml_parser_context_io_max_num_streams_get, 0); rb_define_method(cXMLParserContext, "io_num_streams", ruby_xml_parser_context_io_num_streams_get, 0); rb_define_method(cXMLParserContext, "keep_blanks?", ruby_xml_parser_context_keep_blanks_q, 0); rb_define_method(cXMLParserContext, "name_node", ruby_xml_parser_context_name_node_get, 0); rb_define_method(cXMLParserContext, "name_depth", ruby_xml_parser_context_name_depth_get, 0); rb_define_method(cXMLParserContext, "name_depth_max", ruby_xml_parser_context_name_depth_max_get, 0); rb_define_method(cXMLParserContext, "name_tab", ruby_xml_parser_context_name_tab_get, 0); rb_define_method(cXMLParserContext, "node", ruby_xml_parser_context_node_get, 0); rb_define_method(cXMLParserContext, "node_depth", ruby_xml_parser_context_node_depth_get, 0); rb_define_method(cXMLParserContext, "node_depth_max", ruby_xml_parser_context_node_depth_max_get, 0); rb_define_method(cXMLParserContext, "num_chars", ruby_xml_parser_context_num_chars_get, 0); rb_define_method(cXMLParserContext, "replace_entities?", ruby_xml_parser_context_replace_entities_q, 0); rb_define_method(cXMLParserContext, "replace_entities=", ruby_xml_parser_context_replace_entities_set, 1); rb_define_method(cXMLParserContext, "space_depth", ruby_xml_parser_context_space_depth_get, 0); rb_define_method(cXMLParserContext, "space_depth_max", ruby_xml_parser_context_space_depth_max_get, 0); rb_define_method(cXMLParserContext, "subset_external?", ruby_xml_parser_context_subset_external_q, 0); rb_define_method(cXMLParserContext, "subset_external_system_id", ruby_xml_parser_context_subset_external_system_id_get, 0); rb_define_method(cXMLParserContext, "subset_external_uri", ruby_xml_parser_context_subset_name_get, 0); rb_define_method(cXMLParserContext, "subset_internal?", ruby_xml_parser_context_subset_internal_q, 0); rb_define_method(cXMLParserContext, "subset_internal_name", ruby_xml_parser_context_subset_name_get, 0); rb_define_method(cXMLParserContext, "stats?", ruby_xml_parser_context_stats_q, 0); rb_define_method(cXMLParserContext, "standalone?", ruby_xml_parser_context_standalone_q, 0); rb_define_method(cXMLParserContext, "valid", ruby_xml_parser_context_valid_q, 0); rb_define_method(cXMLParserContext, "validate?", ruby_xml_parser_context_validate_q, 0); 
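  /* Illustrative Ruby-side use of the context accessors registered in this
   * function (the values depend entirely on the document being parsed):
   *
   *   context = parser.parser_context
   *   context.encoding      #=> e.g. "UTF-8", or nil
   *   context.well_formed?  #=> true after a clean parse
   */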
rb_define_method(cXMLParserContext, "version", ruby_xml_parser_context_version_get, 0); rb_define_method(cXMLParserContext, "well_formed?", ruby_xml_parser_context_well_formed_q, 0); } ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/libxml.c0000644000000000000000000000427111672453175024475 0ustar rootroot/* $Id: libxml.c,v 1.2 2006/04/17 13:30:22 roscopeco Exp $ */ /* Please see the LICENSE file for copyright and distribution information */ #include "libxml.h" /* Ruby's util.h has ruby_strdup */ #include "util.h" #ifdef xmlMalloc #undef xmlMalloc #endif #ifdef xmlRealloc #undef xmlRealloc #endif #ifdef xmlMemStrdup #undef xmlMemStrdup #endif #ifdef xmlMemFree #undef xmlMemFree #endif #ifdef RubyMemMalloc #undef RubyMemMalloc #endif #ifdef RubyMemRealloc #undef RubyMemRealloc #endif #ifdef RubyMemStrdup #undef RubyMemStrdup #endif #ifdef RubyMemFree #undef RubyMemFree #endif #define RubyMemFree ruby_xfree #define RubyMemRealloc ruby_xrealloc #define RubyMemMalloc ruby_xmalloc #define RubyMemStrdup ruby_strdup VALUE mXML; static xmlFreeFunc freeFunc = NULL; static xmlMallocFunc mallocFunc = NULL; static xmlReallocFunc reallocFunc = NULL; static xmlStrdupFunc strdupFunc = NULL; void Init_libxml_so(void) { /* Some libxml memory goo that should be done before anything else */ xmlMemGet((xmlFreeFunc *) & freeFunc, (xmlMallocFunc *) & mallocFunc, (xmlReallocFunc *) & reallocFunc, (xmlStrdupFunc *) & strdupFunc); if (xmlMemSetup((xmlFreeFunc)RubyMemFree, (xmlMallocFunc)RubyMemMalloc, (xmlReallocFunc)RubyMemRealloc, (xmlStrdupFunc)RubyMemStrdup) != 0) rb_fatal("could not install the memory handlers for libxml"); xmlInitParser(); mXML = rb_define_module("XML"); rb_define_const(mXML, "XML_NAMESPACE", rb_str_new2((const char*)XML_XML_NAMESPACE)); ruby_init_parser(); ruby_init_xml_parser_context(); ruby_init_xml_attr(); ruby_init_xml_attribute(); ruby_init_xml_document(); ruby_init_xml_node(); ruby_init_xml_node_set(); ruby_init_xml_ns(); ruby_init_xml_sax_parser(); ruby_init_xml_tree(); ruby_init_xml_xinclude(); ruby_init_xml_xpath(); ruby_init_xml_xpath_context(); ruby_init_xml_xpointer(); ruby_init_xml_xpointer_context(); ruby_init_input_callbacks(); /* MUFF */ ruby_init_xml_dtd(); /* MUFF */ ruby_init_xml_schema(); /* MUFF */ ruby_xml_parser_default_substitute_entities_set(cXMLParser, Qtrue); ruby_xml_parser_default_load_external_dtd_set(cXMLParser, Qtrue); } ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_sax_parser.h0000644000000000000000000000250311672453175027277 0ustar rootroot/* $Id: ruby_xml_sax_parser.h,v 1.2 2006/04/14 14:45:52 roscopeco Exp $ */ /* Please see the LICENSE file for copyright and distribution information */ #ifndef __RUBY_XML_SAX_PARSER__ #define __RUBY_XML_SAX_PARSER__ extern VALUE cXMLSaxParser; typedef struct ruby_xml_sax_parser_callbacks { VALUE internalSubset; VALUE isStandalone; VALUE hasInternalSubset; VALUE hasExternalSubset; VALUE resolveEntity; VALUE getEntity; VALUE entityDecl; VALUE notationDecl; VALUE attributeDecl; VALUE elementDecl; VALUE unparsedEntityDecl; VALUE setDocumentLocator; VALUE startDocument; VALUE endDocument; VALUE startElement; VALUE endElement; VALUE reference; VALUE characters; VALUE ignorableWhitespace; VALUE processingInstruction; VALUE comment; VALUE xmlParserWarning; VALUE xmlParserError; VALUE xmlParserFatalError; VALUE getParameterEntity; VALUE cdataBlock; VALUE externalSubset; } ruby_xml_sax_parser_callbacks; typedef struct ruby_xml_sax_parser { xmlParserCtxtPtr xpc; xmlSAXHandlerPtr xsh; 
ruby_xml_sax_parser_callbacks *cbp; VALUE filename; VALUE str; } ruby_xml_sax_parser; void ruby_xml_sax_parser_free(ruby_xml_sax_parser *rxsp); void ruby_init_xml_sax_parser(void); VALUE ruby_xml_sax_parser_new(VALUE class); #endif ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_xpath.h0000644000000000000000000000122211672453175026251 0ustar rootroot/* $Id: ruby_xml_xpath.h,v 1.1 2006/02/21 20:40:16 roscopeco Exp $ */ /* Please see the LICENSE file for copyright and distribution information */ #ifndef __RUBY_XML_XPATH__ #define __RUBY_XML_XPATH__ extern VALUE cXMLXPath; extern VALUE eXMLXPathInvalidPath; typedef struct ruby_xml_xpath { VALUE xd; VALUE ctxt; xmlXPathObjectPtr xpop; } ruby_xml_xpath; void ruby_xml_xpath_free(ruby_xml_xpath *rxxp); VALUE ruby_xml_xpath_find(int argc, VALUE *argv, VALUE class); VALUE ruby_xml_xpath_find2(int argc, VALUE *argv); VALUE ruby_xml_xpath_new(VALUE class, VALUE xd, VALUE xxpc, xmlXPathObjectPtr xpop); void ruby_init_xml_xpath(void); #endif ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/CHANGELOG0000644000000000000000000000565411672453175022662 0ustar rootroot===== 15.4.2006 Ross Bamford * Implemented SAX parser callback handling ===== 12.4.2006 Ross Bamford * Integrated / tested community patches * Defined XML::Node (hash) equality in terms of XML representation ===== 12.4.2006 Tim Yamin (patches) * Fixed XML::Node#content inoperable bug * Fixed memory leak in same ===== 12.4.2006 Mark Van Holstyn (patches) * Added XML::Node::Set#first * Added XML::Node::Set#empty? * Fixes to XML::Node::Set#to_a * Added XML::Node#find_first * Added XML::Node#remove! ===== 27.3.2006 Ross Bamford * Integrated contributed XML::Parser.register_error_handler patch ===== 27.2.2006 Ross Bamford * Fixed all multiple symbol definitions for -fno-common. * Removed OSX -fno-common workaround. ===== 21.2.2006 Ross Bamford * Patched extconf.rb with OSX -fno-common workaround * Added gem and packaging support to Rakefile * Moved version update to Rakefile * Removed legacy project utility scripts ===== 19.2.2006 Ross Bamford * Fixed doublefree bug in ruby_xml_attr. * Fixed small leak in parser ===== 18.12.2005 Ross Bamford * Updated for GCC 4.0 (community patches) * Fixed default validation bug * Refactored project, removed outdated files, cleaned up tests. * Added RDoc documentation across .c files. * Fixed up a few strings. ===== 14.4.2004 Mangler Jurgen * ruby_xml_node.cz: fixed ruby_xml_node_property_set. The ill-behaviour was, that there was added a second attribute of the same name, when you were setting the value of an already existing attribute. ===== 17.3.2004 Lukas Svoboda * ruby_xml_node.c: ruby_xml_node_to_s now returns XML subtree dump. ===== 27.2.2004 Martin Povolny * ruby_xml_node.c: added XML::Node.copy, this makes possible building of xml documents from nodes taken from other xml documents without making ruby SIGSEGV (see tests/copy_bug.rb). ===== 26.2.2004 Martin Povolny * ruby_xml_dtd.c, ruby_xml_dtd.h, ruby_xml_schema.c, ruby_xml_schema.h: more work on validation, now you can actually validate document using dtd or xml schema, also solved warning and error propagation (see tests/{dtd|schema}-test.rb). 
===== 30.12.2003 Martin Povolny * ruby_xml_dtd.c, ruby_xml_dtd.h, ruby_xml_schema.c, ruby_xml_schema.h: prelimitary support for dtd and schema validation ===== 15.9.2003 Martin Povolny * ruby_xml_input_cbg.c, libxml.c: added class InputCallbacks to make possible registering custom input callbacks handlers (xmlRegisterInputCallbacks) written in ruby ===== 1.8.2003 Martin Povolny * ruby_xml_document.c: corrected argument handling in ruby_xml_document_find * ruby_xml_node.c: corrected argument handling in ruby_xml_node_find ruby-mkrf-0.2.3.orig/test/sample_files/libxml-ruby-0.3.8/LICENSE0000644000000000000000000000232711672453175022447 0ustar rootroot# $Id: LICENSE,v 1.3 2006/02/28 09:57:52 roscopeco Exp $ Copyright (c) 2002-2006 Sean Chittenden and contributors Copyright (c) 2001 Wai-Sun "Squidster" Chia Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
ruby-mkrf-0.2.3.orig/test/sample_files/libtrivial/0000755000000000000000000000000011672453175020665 5ustar rootrootruby-mkrf-0.2.3.orig/test/sample_files/libtrivial/ext/0000755000000000000000000000000011672453175021465 5ustar rootrootruby-mkrf-0.2.3.orig/test/sample_files/libtrivial/ext/extconf.rb0000644000000000000000000000013411672453175023456 0ustar rootrootrequire File.dirname(__FILE__) + '/../../../../lib/mkrf' Mkrf::Generator.new('libtrivial') ruby-mkrf-0.2.3.orig/test/sample_files/libtrivial/ext/libtrivial.c0000644000000000000000000000013011672453175023764 0ustar rootroot#include "ruby.h" void Init_libtrivial() { rb_define_class("MyClass", rb_cObject); }ruby-mkrf-0.2.3.orig/test/sample_files/cpp_bang/0000755000000000000000000000000011672453175020275 5ustar rootrootruby-mkrf-0.2.3.orig/test/sample_files/cpp_bang/ext/0000755000000000000000000000000011672453175021075 5ustar rootrootruby-mkrf-0.2.3.orig/test/sample_files/cpp_bang/ext/mkrf_config.rb0000644000000000000000000000020111672453175023677 0ustar rootrootrequire 'rubygems' require 'mkrf' Mkrf::Generator.new('bang', ["*.cpp"]) do |g| g.ldshared << ' -L/usr/lib -lgcc -lstdc++' endruby-mkrf-0.2.3.orig/test/sample_files/cpp_bang/ext/bang.cpp0000644000000000000000000000045511672453175022514 0ustar rootroot#include #include VALUE rk_mBang; static VALUE t_bang(VALUE self) { return rb_str_new2("Bang !"); } extern "C" void Init_bang() { // define the class 'Hello' rk_mBang = rb_define_module("Bang"); rb_define_singleton_method(rk_mBang, "bang", (VALUE(*)(...))t_bang, 0); } ruby-mkrf-0.2.3.orig/test/fixtures/0000755000000000000000000000000011672453200015717 5ustar rootrootruby-mkrf-0.2.3.orig/test/fixtures/stdmkrf.h0000644000000000000000000000001411672453175017550 0ustar rootroot#define MKRFruby-mkrf-0.2.3.orig/test/fixtures/down_a_directory/0000755000000000000000000000000011672453175021265 5ustar rootrootruby-mkrf-0.2.3.orig/test/fixtures/down_a_directory/header_down_a_directory.h0000644000000000000000000000003011672453175026272 0ustar rootroot#define DOWN_A_DIRECTORYruby-mkrf-0.2.3.orig/test/abstract_unit.rb0000644000000000000000000000166111672453175017254 0ustar rootroot$:.unshift(File.dirname(__FILE__) + '/../lib') require 'test/unit' require File.dirname(__FILE__) + '/../lib/mkrf' $debug = false class Test::Unit::TestCase def silence_command_line yield and return if $debug silence_stream(STDERR) do silence_stream(STDOUT) do yield end end end # silence_stream taken from Rails ActiveSupport reporting.rb # Silences any stream for the duration of the block. # # silence_stream(STDOUT) do # puts 'This will never be seen' # end # # puts 'But this will' def silence_stream(stream) old_stream = stream.dup stream.reopen(RUBY_PLATFORM =~ /mswin/ ? 'NUL:' : '/dev/null') stream.sync = true yield ensure stream.reopen(old_stream) end protected def assert_creates_file(file) assert !File.exist?(file), "#{file} already exists!" yield assert File.exist?(file), "#{file} wasn't created!" end endruby-mkrf-0.2.3.orig/CHANGELOG0000644000000000000000000000451511672453175014321 0ustar rootroot= 0.2.0 2/3/07 * [NEW] Added paths option to has_library? * [NEW] Added Generator#abort! for when that critical library just isn't there. * [NEW] Added Zed Shaw's rakehelper lib (from rfuzz, with permission). * [NEW] Extensions using mkrf can now build properly in RubyGems! * [NEW] Extension configurations should now be named mkrf_conf.rb to not conflict with the mkmf settings in RubyGems. * [NEW] Added example extensions. * [NEW] Add install task to generated Rakefile. 
* [CLEAN-UP] Preprocessor define handling moved to Availability from Generator. * [NEW] find_executable method added to Availability. * [FIXED] include_header and has_header? now properly set preprocessor defines. * [FIXED] Defaults on Generator work again. [44] = 0.1.2 10/4/06 * [CLEAN-UP] Removed useless add_source from Generator. We've got a constructor for that. [35] * [FIXED] Generator.new defaults to sources in the local directory, so extconfs can (and should) be kept in the same directory as the extension files (PROJ_ROOT/ext) and won't clobber pre-existing Rakefiles. [35] * [CLEAN-UP] Goodbye ugly method missing hack! [34] * [NEW] Added objs to Generator. Access it as you might ldshared. -John G. * [CLEAN-UP] Removed the redundant .so rule from the generated rakefile. -John G. * [FIXED] Correct defaults in Availability. [29] * [NEW] Using rubylibdir from rbconfig for library path. [29] * [NEW] Availability options go straight to availability without generator modification. [29] * [NEW] CFLAGS and LDSHARED now accessible through .cflags and .ldshared. [29] * [FIXED] Can now modify Availability options from the Generator contructor. This may break some existing extconfs that use the .new('libname','file_pattern_a', 'file_pattern_b') syntax. File patterns now only belong as arg 2 and should be an array or a string. * [FIXED] No indenting generated Rakefile code. [26] = 0.1.1 8/17/06 * [NEW] Logging! [22] * [NEW] Added a description to the extension building task [21] * [NEW] Added additional_code accessor. If you have stuff you want to add to the Rakefile the Generator doesn't provide for, feel free to stick it (in string form!) in here. Yes, I know this smells. If you have a better suggestion, PDI and tell me about it. [20] * [FIXED] Use proper file extensions for libraries. [18] * [FIXED] default source pattern for Generator [17] = 0.1.0 6/28/06 * First release. ruby-mkrf-0.2.3.orig/lib/0000755000000000000000000000000011672453175013650 5ustar rootrootruby-mkrf-0.2.3.orig/lib/mkrf/0000755000000000000000000000000011672453175014607 5ustar rootrootruby-mkrf-0.2.3.orig/lib/mkrf/availability.rb0000644000000000000000000002646111672453175017617 0ustar rootrootrequire 'rbconfig' require 'logger' module Mkrf # The +Availability+ class is concerned with libraries, headers, and # functions. It can be easily wrapped (see Mkrf::Generator for an # example) and should be able to be used as a basis for a variety of programs # which need to determine functionality based on what libraries are available # on the current system. class Availability # ruby 1.9+ if Config::CONFIG['rubyhdrdir'] DEFAULT_INCLUDES = [Config::CONFIG['rubyhdrdir'], Config::CONFIG['rubyhdrdir'] + "/" + Config::CONFIG['arch'], Config::CONFIG["archdir"],Config::CONFIG['sitelibdir'], "."] else DEFAULT_INCLUDES = [Config::CONFIG['includedir'], Config::CONFIG["archdir"], Config::CONFIG['sitelibdir'], "."] end # These really shouldn't be static like this.. TEMP_SOURCE_FILE = "temp_source.c" TEMP_EXECUTABLE = "temp_executable" attr_reader :headers, :loaded_libs, :includes, :logger, :defines # Create a new Availability instance. 
# # Valid keys for the options hash include: # * :loaded_libs -- libraries to load by default # * :library_paths -- libraries paths to include by default # * :headers -- headers to load by default # * :compiler -- which compiler to use when determining availability # * :includes -- directories that should be searched for include files def initialize(options = {}) @loaded_libs = [(options[:loaded_libs] || Config::CONFIG["LIBS"].gsub('-l', '').split)].flatten @library_paths = [(options[:library_paths] || [])].flatten # Not sure what COMMON_HEADERS looks like when populated @headers = options[:headers] || [] # Config::CONFIG["COMMON_HEADERS"] @compiler = options[:compiler] || Config::CONFIG["CC"] @includes = [(options[:includes] || DEFAULT_INCLUDES)].flatten @logger = Logger.new('mkrf.log') @defines = [] end # Include a library in the list of available libs. Returns +false+ if the # library is not available. Returns non-false otherwise. # # Params: # * library -- the library to be included as a string. # * function -- a method to base the inclusion of the library on. +main+ by default. # * paths -- an optional list of search paths if the library is not found in the default paths. def include_library(library, function = "main", *paths) paths.each do |library_dir| @library_paths << library_dir end @loaded_libs << library if has_library?(library, function) end # Include a header in the list of availiable headers. Returns +false+ if the # header is not available. Returns non-false otherwise. If the header is # found, the preprocessor constant HAVE_BLAH is defined where BLAH is the name # of the header in uppercase without the file extension. # # Params: # * header -- the name of the header to be included as a string. # * paths -- an optional list of search paths if the header is not found in the default paths. def include_header(header, *paths) @headers << header if has_header?(header, *paths) end # Returns a boolean whether indicating whether the library can be found # by attempting to reference the function passed (+main+ by default). # # Params: # * library -- the library to be included as a string # * function -- a method to base the inclusion of the library on. +main+ by default. # * paths -- an optional list of search paths if the library is not found in the default paths def has_library?(library, function = "main", *paths) logger.info "Checking for library: #{library}" return true if library_already_loaded?(library) return true if RUBY_PLATFORM =~ /mswin/ # TODO: find a way on windows # Should this be only found_library? or a specialized version with # path searching? found_library?(library, function) end # Returns +true+ if the header is found in the default search path or in # optional paths passed as an argument, +false+ otherwise. If the header is # found, the preprocessor constant HAVE_BLAH is defined where BLAH is the name # of the header in uppercase without the file extension. # # Params: # * header -- the header to be searched for # * paths -- an optional list of search paths if the header is not found in the default paths def has_header?(header, *paths) if header_already_loaded?(header) || header_can_link?(header) || header_found_in_paths?(header, paths) defines << format("HAVE_%s", header.tr("a-z./\055", "A-Z___")) return true end logger.warn "Header not found: #{header}" return false end # Returns +true+ if the function is able to be called based on libraries and # headers currently loaded. Returns +false+ otherwise. 
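    #
    # Illustrative example (the result depends on the local toolchain):
    #
    #   avail = Mkrf::Availability.new
    #   avail.has_function?('printf')  # => true on most systems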
# # Params: # * function -- the function to check for def has_function?(function) if can_link?(simple_call(function)) or can_link?(simple_reference(function)) logger.info "Function found: #{function}()" return true else logger.warn "Function not found: #{function}()" return false end end # Returns the result of an attempt to compile and link the function body # passed in def can_link?(function_body) silence_command_line do create_source(function_body) system(link_command) end ensure FileUtils.rm_f TEMP_SOURCE_FILE FileUtils.rm_f TEMP_EXECUTABLE end def with_headers(*args, &b) with_stackable_attribute('headers', *args, &b) end def with_loaded_libs(*args, &b) with_stackable_attribute('loaded_libs', *args, &b) end def with_includes(*args, &b) with_stackable_attribute('includes', *args, &b) end # Returns a string of libraries formatted for compilation def library_compile_string if RUBY_PLATFORM =~ /mswin/ @loaded_libs.join(' ') else @loaded_libs.collect {|l| "-l#{l}"}.join(' ') end end # Returns a string of libraries directories formatted for compilation def library_paths_compile_string if RUBY_PLATFORM =~ /mswin/ @library_paths.collect {|l| "/libpath:#{l}"}.join(' ') else @library_paths.collect {|l| "-L#{l}"}.join(' ') end end def ldshared_string if RUBY_PLATFORM =~ /mswin/ "link -nologo -incremental:no -debug -opt:ref -opt:icf -dll" else Config::CONFIG['LDSHARED'] end end def ld_outfile(filename) # :nodoc: if RUBY_PLATFORM =~ /mswin/ "-out:#{filename}" else "-o #{filename}" end end # Returns a string of include directories formatted for compilation def includes_compile_string @includes.collect {|i| "-I#{i}"}.join(' ') end # Takes the name of an executable and an optional set of paths to search. # If no paths are given, the environmental path is used by default. # Returns the absolute path to an executable, or nil if not found. def find_executable(bin, *paths) paths = ENV['PATH'].split(File::PATH_SEPARATOR) if paths.empty? paths.each do |path| file = File.join(path, bin) return file if File.executable?(file) end return nil end private def found_library?(library, function) library_found = with_loaded_libs(library) { has_function? function } library_found ? logger.info("Library found: #{library}") : logger.warn("Library not found: #{library}") library_found end def header_can_link?(header) has_header = with_headers(header) { can_link?(simple_include(header)) } if has_header logger.info("Header found: #{header}") return true end end def library_already_loaded?(library) if @loaded_libs.include? library logger.info "Library already loaded: #{library}" return true end return false end def header_already_loaded?(header) if @headers.include? 
header logger.info("Header already loaded: #{header}") return true end return false end # def library_found_in_paths?(library, paths) # paths.each do |include_path| # # if with_libs(include_path) { library_can_link?(header) } # @libspath << include_path # return true # end # end # # return false # # end def header_found_in_paths?(header, paths) paths.each do |include_path| if with_includes(include_path) { header_can_link?(header) } @includes << include_path return true end end return false end def with_stackable_attribute(attribute, *args) args = args.to_a instance_variable_set("@#{attribute}", instance_variable_get("@#{attribute}") + args) value = yield instance_variable_set("@#{attribute}", instance_variable_get("@#{attribute}") - args) return value end def header_include_string @headers.collect {|header| "#include <#{header}>"}.join('\n') end def link_command # This current implementation just splats the library_paths in # unconditionally. Is this problematic? "#{@compiler} -o #{TEMP_EXECUTABLE} #{library_paths_compile_string}" + " #{library_compile_string} #{includes_compile_string}" + " #{TEMP_SOURCE_FILE}" end # Creates a temporary source file with the string passed def create_source(src) File.open(TEMP_SOURCE_FILE, "w+") do |f| f.write(src) end end # Basic skeleton for calling a function def simple_call(func) src = <<-SRC #{header_include_string} int main() { return 0; } int t() { #{func}(); return 0; } SRC end # Basic skeleton for referencing a function def simple_reference(func) src = <<-SRC #{header_include_string} int main() { return 0; } int t() { void ((*volatile p)()); p = (void ((*)()))#{func}; return 0; } SRC end # skeleton for testing includes def simple_include(header) src = <<-SRC #{header_include_string} #include <#{header}> int main() { return 0; } SRC end def silence_command_line yield and return if $debug silence_stream(STDERR) do silence_stream(STDOUT) do yield end end end # silence_stream taken from Rails ActiveSupport reporting.rb # Silences any stream for the duration of the block. # # silence_stream(STDOUT) do # puts 'This will never be seen' # end # # puts 'But this will' def silence_stream(stream) old_stream = stream.dup stream.reopen(RUBY_PLATFORM =~ /mswin/ ? 'NUL:' : '/dev/null') stream.sync = true yield ensure stream.reopen(old_stream) end end end ruby-mkrf-0.2.3.orig/lib/mkrf/generator.rb0000644000000000000000000001471111672453175017126 0ustar rootrootrequire 'rubygems' require 'rbconfig' require 'rake/tasklib' module Mkrf # +Generator+ is concerned with taking configuration for an extension # and writing a +Rakefile+ to the local filesystem which will later be # used to build the extension. # # You will typically only create one +Generator+ per extconf.rb # file, which in turn will generate a Rakefile for building one extension # module. 
# # = Usage # # In the most basic usage, +Generator+ simply takes the name of the library # to compile: # # require 'mkrf' # Mkrf::Generator.new('libtrivial') # # Configuration of the build can be passed to the +Generator+ constructor # as a block: # # Mkrf::Generator.new('libxml') do |g| # g.include_library('socket','socket') # g.include_header('libxml/xmlversion.h', # '/opt/include/libxml2', # '/usr/local/include/libxml2', # '/usr/include/libxml2') # end # # It is also possible to specify the library paths in # include_library # Mkrf::Generator.new('libxml') do |g| # g.include_library('socket','socket', '/usr/local/lib/libxml') # end # class Generator include Rake CONFIG = Config::CONFIG # Any extra code, given as a string, to be appended to the Rakefile. attr_accessor :additional_code # You may append to these attributes directly in your Generator.new block, # for example: g.objects << ' ../common/foo.o ../common/bar.so -lmystuff' or # g.cflags << ' -ansi -Wall' # # Note the extra space at the beginning of those strings. attr_accessor :cflags # +objects+ is for adding _additional_ object files to the link-edit command -- outside # of the ones that correspond to the source files. attr_accessor :objects # Any additional options you'd like appended to your system-specific linker command # (which is used to build the shared library). attr_accessor :ldshared # Create a +Generator+ object which writes a Rakefile to the current directory of the local # filesystem. # # Params: # * +extension_name+ -- the name of the extension # * +source_patterns+ -- an array of patterns describing source files to be compiled. ["*.c"] is the default. def initialize(extension_name, source_patterns = ["*.c"], availability_options = {}) @sources = source_patterns @extension_name = extension_name + ".#{CONFIG['DLEXT']}" @available = Mkrf::Availability.new(availability_options) @defines = [] if @sources[0] =~ /cpp/ @cc = 'g++' # should be in CONFIG['C++'] but is not. @source_extension = 'cpp' else @cc = CONFIG['CC'] @source_extension = 'c' end @objects = '' @ldshared = '' @cflags = "#{CONFIG['CCDLFLAGS']} #{CONFIG['CFLAGS']} #{CONFIG['ARCH_FLAG']}" yield self if block_given? write_rakefile end # An array of the source patterns as single quoted strings def sources @sources.collect {|s| "'#{s}'"} end # Add a define to the compile string. Example: # # Mkrf::Generator.new('my_library') do |g| # g.add_define('HAVE_PTHREADS') # end # # Params: # * +defn+ -- string to add to compile time defines def add_define(defn) @available.defines << defn end # Include a library in the compile. Returns +false+ if the # library is not available. Returns non-false otherwise. # Parameters are the same as Mkrf::Availability#include_library def include_library(*args) @available.include_library(*args) end # Include a header in the compile. Returns +false+ if the header is not # available, returns non-false otherwise. As a side effect, a compile # time define occurs as +HAVE_+ appended with the name of the header in # upper and scored case. # Parameters are the same as Mkrf::Availability#include_header def include_header(*args) @available.include_header(*args) end # Returns +true+ if the function is able to be called based on libraries and # headers currently loaded. Returns +false+ otherwise. # # Params: # * function -- the function to check for def has_function?(function) @available.has_function? function end # Returns mkrf's logger instance. You can use this to set logging levels. 
# # Mkrf::Generator.new('libsomethin') do |g| # g.logger.level = Logger::WARN # end # def logger @available.logger end # Logs a fatal error and exits with a non-zero code (defaults to 1) def abort!(str, code = 1) logger.fatal str exit code end def write_rakefile(filename = "Rakefile") # :nodoc: File.open(filename, "w+") do |f| f.puts rakefile_contents end logger.info "Rakefile written" end def defines_compile_string # :nodoc: @available.defines.collect {|define| "-D#{define}"}.join(' ') end def library_path(path) # :nodoc: if RUBY_PLATFORM =~ /mswin/ "-libpath:#{path}" else "-L#{path}" end end def rakefile_contents # :nodoc: objext = CONFIG['OBJEXT'] <<-END_RAKEFILE # Generated by mkrf require 'rake/clean' CLEAN.include('*.#{objext}') CLOBBER.include('#{@extension_name}', 'mkrf.log') SRC = FileList[#{sources.join(',')}] OBJ = SRC.ext('#{objext}') CC = '#{@cc}' ADDITIONAL_OBJECTS = '#{objects}' LDSHARED = "#{@available.ldshared_string} #{ldshared}" LIBPATH = "#{library_path(CONFIG['libdir'])} #{@available.library_paths_compile_string}" INCLUDES = "#{@available.includes_compile_string}" LIBS = "#{@available.library_compile_string}" CFLAGS = "#{cflags} #{defines_compile_string}" RUBYARCHDIR = "\#{ENV["RUBYARCHDIR"]}" LIBRUBYARG_SHARED = "#{CONFIG['LIBRUBYARG_SHARED']}" task :default => ['#{@extension_name}'] rule '.#{objext}' => '.#{@source_extension}' do |t| sh "\#{CC} \#{CFLAGS} \#{INCLUDES} -c \#{t.source}" end desc "Build this extension" file '#{@extension_name}' => OBJ do sh "\#{LDSHARED} \#{LIBPATH} #{@available.ld_outfile(@extension_name)} \#{OBJ} \#{ADDITIONAL_OBJECTS} \#{LIBS} \#{LIBRUBYARG_SHARED}" end desc "Install this extension" task :install => '#{@extension_name}' do makedirs "\#{RUBYARCHDIR}" install "#{@extension_name}", "\#{RUBYARCHDIR}" end #{additional_code} END_RAKEFILE end end end ruby-mkrf-0.2.3.orig/lib/mkrf/rakehelper.rb0000644000000000000000000000473011672453175017262 0ustar rootroot# # Copyright (c) 2005 Zed A. Shaw with portions by Kevin Clark # You can redistribute it and/or modify it under the same terms as Ruby. # def rake(rakedir) Dir.chdir(rakedir) do sh 'rake' end end def mkrf_conf(dir) Dir.chdir(dir) do ruby "mkrf_conf.rb" end end def setup_tests Rake::TestTask.new do |t| t.libs << "test" t.test_files = FileList['test/test*.rb'] t.verbose = true end end def setup_clean otherfiles files = ['build/*', '**/*.o', '**/*.so', '**/*.a', 'lib/*-*', '**/*.log'] + otherfiles CLEAN.include(files) end def setup_rdoc files Rake::RDocTask.new do |rdoc| rdoc.rdoc_dir = 'doc/rdoc' rdoc.options << '--line-numbers' rdoc.rdoc_files.add(files) end end def setup_extension(dir, extension) ext = "ext/#{dir}" ext_so = "#{ext}/#{extension}.#{Config::CONFIG['DLEXT']}" ext_files = FileList[ "#{ext}/*.c", "#{ext}/*.h", "#{ext}/mkrf_conf.rb", "#{ext}/Rakefile", "lib" ] task "lib" do directory "lib" end desc "Builds just the #{extension} extension" task extension.to_sym => ["#{ext}/Rakefile", ext_so ] file "#{ext}/Rakefile" => ["#{ext}/mkrf_conf.rb"] do mkrf_conf "#{ext}" end file ext_so => ext_files do rake "#{ext}" cp ext_so, "lib" end end def base_gem_spec(pkg_name, pkg_version) rm_rf "test/coverage" pkg_version = pkg_version pkg_name = pkg_name pkg_file_name = "#{pkg_name}-#{pkg_version}" Gem::Specification.new do |s| s.name = pkg_name s.version = pkg_version s.platform = Gem::Platform::RUBY s.has_rdoc = true s.extra_rdoc_files = [ "README" ] s.files = %w(Rakefile) + Dir.glob("{bin,doc/rdoc,ext,examples}/**/*") + Dir.glob("tools/*.rb") + Dir.glob(RUBY_PLATFORM !~ /mswin/ ? 
"lib/**/*.rb" : "lib/**/*") s.require_path = "lib" s.bindir = "bin" end end def setup_gem(pkg_name, pkg_version) spec = base_gem_spec(pkg_name, pkg_version) yield spec if block_given? Rake::GemPackageTask.new(spec) do |p| p.gem_spec = spec p.need_tar = true if RUBY_PLATFORM !~ /mswin/ end end def sub_project(project, *targets) targets.each do |target| Dir.chdir "projects/#{project}" do sh %{rake --trace #{target.to_s} } end end end # Conditional require rcov/rcovtask if present begin require 'rcov/rcovtask' Rcov::RcovTask.new do |t| t.test_files = FileList['test/test*.rb'] t.rcov_opts << "-x /usr" t.output_dir = "test/coverage" end rescue Object end ruby-mkrf-0.2.3.orig/lib/mkrf.rb0000644000000000000000000000020211672453175015126 0ustar rootrootrequire File.dirname(__FILE__) + '/mkrf/availability' require File.dirname(__FILE__) + '/mkrf/generator' Mkrf::VERSION = "0.2.3" ruby-mkrf-0.2.3.orig/metadata.yml0000644000000000000000000002633011672453175015411 0ustar rootroot--- !ruby/object:Gem::Specification rubygems_version: 0.9.2 specification_version: 1 name: mkrf version: !ruby/object:Gem::Version version: 0.2.3 date: 2008-01-06 00:00:00 +09:00 summary: Generate Rakefiles to Build C Extensions to Ruby require_paths: - lib email: kevin.clark@gmail.com homepage: http://glu.ttono.us rubyforge_project: mkrf description: This proposed replacement to mkmf generates Rakefiles to build C Extensions. autorequire: mkrf default_executable: bindir: bin has_rdoc: true required_ruby_version: !ruby/object:Gem::Version::Requirement requirements: - - ">" - !ruby/object:Gem::Version version: 0.0.0 version: platform: ruby signing_key: cert_chain: post_install_message: authors: - Kevin Clark files: - Rakefile - README - CHANGELOG - MIT-LICENSE - lib/mkrf - lib/mkrf/rakehelper.rb - lib/mkrf/generator.rb - lib/mkrf/availability.rb - lib/mkrf.rb - test/integration - test/integration/test_sample_projects.rb - test/sample_files - test/sample_files/libxml-ruby-0.3.8 - test/sample_files/libxml-ruby-0.3.8/ext - test/sample_files/libxml-ruby-0.3.8/ext/xml - test/sample_files/libxml-ruby-0.3.8/ext/xml/libxml.rb - test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_node.c - test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_node_set.c - test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_xpath_context.h - test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_parser.h - test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_schema.c - test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_xpointer_context.h - test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_input_cbg.c - test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_xpointer.h - test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_document.h - test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_parser_context.h - test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_sax_parser.h - test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_xpath_context.c - test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_xpointer.c - test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_xinclude.h - test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_xinclude.c - test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_attr.c - test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_node.h - test/sample_files/libxml-ruby-0.3.8/ext/xml/sax_parser_callbacks.inc - test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_ns.c - test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_tree.c - test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_ns.h - test/sample_files/libxml-ruby-0.3.8/ext/xml/cbg.c 
- test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_tree.h - test/sample_files/libxml-ruby-0.3.8/ext/xml/libxml.c - test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_document.c - test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_attribute.h - test/sample_files/libxml-ruby-0.3.8/ext/xml/extconf.rb - test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_xpath.h - test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_input_cbg.h - test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_dtd.h - test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_parser.c - test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_xpath.c - test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_node_set.h - test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_sax_parser.c - test/sample_files/libxml-ruby-0.3.8/ext/xml/old_extconf.rb - test/sample_files/libxml-ruby-0.3.8/ext/xml/libxml.h - test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_attr.h - test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_attribute.c - test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_xpointer_context.c - test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_dtd.c - test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_schema.h - test/sample_files/libxml-ruby-0.3.8/ext/xml/ruby_xml_parser_context.c - test/sample_files/libxml-ruby-0.3.8/CHANGELOG - test/sample_files/libxml-ruby-0.3.8/README - test/sample_files/libxml-ruby-0.3.8/LICENSE - test/sample_files/syck-0.55 - test/sample_files/syck-0.55/ext - test/sample_files/syck-0.55/ext/ruby - test/sample_files/syck-0.55/ext/ruby/ext - test/sample_files/syck-0.55/ext/ruby/ext/syck - test/sample_files/syck-0.55/ext/ruby/ext/syck/gram.c - test/sample_files/syck-0.55/ext/ruby/ext/syck/bytecode.c - test/sample_files/syck-0.55/ext/ruby/ext/syck/yaml2byte.c - test/sample_files/syck-0.55/ext/ruby/ext/syck/gram.h - test/sample_files/syck-0.55/ext/ruby/ext/syck/rubyext.c - test/sample_files/syck-0.55/ext/ruby/ext/syck/implicit.c - test/sample_files/syck-0.55/ext/ruby/ext/syck/syck.c - test/sample_files/syck-0.55/ext/ruby/ext/syck/emitter.c - test/sample_files/syck-0.55/ext/ruby/ext/syck/token.c - test/sample_files/syck-0.55/ext/ruby/ext/syck/yamlbyte.h - test/sample_files/syck-0.55/ext/ruby/ext/syck/MANIFEST - test/sample_files/syck-0.55/ext/ruby/ext/syck/handler.c - test/sample_files/syck-0.55/ext/ruby/ext/syck/extconf.rb - test/sample_files/syck-0.55/ext/ruby/ext/syck/node.c - test/sample_files/syck-0.55/ext/ruby/ext/syck/syck.h - test/sample_files/syck-0.55/ext/ruby/tests - test/sample_files/syck-0.55/ext/ruby/tests/basic.rb - test/sample_files/syck-0.55/ext/ruby/samples - test/sample_files/syck-0.55/ext/ruby/samples/yaml-sortHashKeys.rb - test/sample_files/syck-0.55/ext/ruby/samples/okayNews-sample.rb - test/sample_files/syck-0.55/ext/ruby/samples/okayNews-validate.rb - test/sample_files/syck-0.55/ext/ruby/samples/okayNews-modules.rb - test/sample_files/syck-0.55/ext/ruby/samples/okayRpc-client.rb - test/sample_files/syck-0.55/ext/ruby/samples/okayRpc-server.rb - test/sample_files/syck-0.55/ext/ruby/lib - test/sample_files/syck-0.55/ext/ruby/lib/okay - test/sample_files/syck-0.55/ext/ruby/lib/okay/rpc.rb - test/sample_files/syck-0.55/ext/ruby/lib/okay/news.rb - test/sample_files/syck-0.55/ext/ruby/lib/yaml - test/sample_files/syck-0.55/ext/ruby/lib/yaml/tag.rb - test/sample_files/syck-0.55/ext/ruby/lib/yaml/ypath.rb - test/sample_files/syck-0.55/ext/ruby/lib/yaml/encoding.rb - test/sample_files/syck-0.55/ext/ruby/lib/yaml/constants.rb - test/sample_files/syck-0.55/ext/ruby/lib/yaml/compat.rb - 
test/sample_files/syck-0.55/ext/ruby/lib/yaml/types.rb - test/sample_files/syck-0.55/ext/ruby/lib/yaml/baseemitter.rb - test/sample_files/syck-0.55/ext/ruby/lib/yaml/syck.rb - test/sample_files/syck-0.55/ext/ruby/lib/yaml/rubytypes.rb - test/sample_files/syck-0.55/ext/ruby/lib/yaml/emitter.rb - test/sample_files/syck-0.55/ext/ruby/lib/yaml/error.rb - test/sample_files/syck-0.55/ext/ruby/lib/yaml/stream.rb - test/sample_files/syck-0.55/ext/ruby/lib/yaml/stringio.rb - test/sample_files/syck-0.55/ext/ruby/lib/yaml/dbm.rb - test/sample_files/syck-0.55/ext/ruby/lib/yaml/store.rb - test/sample_files/syck-0.55/ext/ruby/lib/yaml/yamlnode.rb - test/sample_files/syck-0.55/ext/ruby/lib/yaml/basenode.rb - test/sample_files/syck-0.55/ext/ruby/lib/yaml.rb - test/sample_files/syck-0.55/ext/ruby/lib/okay.rb - test/sample_files/syck-0.55/ext/ruby/lib/yod.rb - test/sample_files/syck-0.55/ext/ruby/yts - test/sample_files/syck-0.55/ext/ruby/yts/YtsFlowCollections.yml - test/sample_files/syck-0.55/ext/ruby/yts/YtsNullsAndEmpties.yml - test/sample_files/syck-0.55/ext/ruby/yts/YtsMapInSeq.yml - test/sample_files/syck-0.55/ext/ruby/yts/yts.rb - test/sample_files/syck-0.55/ext/ruby/yts/YtsTypeTransfers.yml - test/sample_files/syck-0.55/ext/ruby/yts/index.yml - test/sample_files/syck-0.55/ext/ruby/yts/YtsFoldedScalars.yml - test/sample_files/syck-0.55/ext/ruby/yts/YtsAnchorAlias.yml - test/sample_files/syck-0.55/ext/ruby/yts/YtsBlockMapping.yml - test/sample_files/syck-0.55/ext/ruby/yts/YtsBasicTests.yml - test/sample_files/syck-0.55/ext/ruby/yts/YtsSpecificationExamples.yml - test/sample_files/syck-0.55/ext/ruby/yts/YtsDocumentSeparator.yml - test/sample_files/syck-0.55/ext/ruby/yts/cookbook.rb - test/sample_files/syck-0.55/ext/ruby/yts/YtsYpath.yml - test/sample_files/syck-0.55/ext/ruby/yts/YtsRubyTests.yml - test/sample_files/syck-0.55/ext/ruby/yts/YtsBlockSequence.yml - test/sample_files/syck-0.55/ext/ruby/yts/YtsErrorTests.yml - test/sample_files/syck-0.55/ext/ruby/CHANGELOG - test/sample_files/syck-0.55/ext/ruby/install.rb - test/sample_files/syck-0.55/ext/ruby/README - test/sample_files/syck-0.55/tests - test/sample_files/syck-0.55/tests/Basic.c - test/sample_files/syck-0.55/tests/CuTest.c - test/sample_files/syck-0.55/tests/Parse.c - test/sample_files/syck-0.55/tests/Makefile.am - test/sample_files/syck-0.55/tests/Makefile.in - test/sample_files/syck-0.55/tests/YTS.c.rb - test/sample_files/syck-0.55/tests/Makefile - test/sample_files/syck-0.55/tests/CuTest.h - test/sample_files/syck-0.55/tests/YTS.c - test/sample_files/syck-0.55/tests/YTS.c.erb - test/sample_files/syck-0.55/tests/Emit.c - test/sample_files/syck-0.55/config - test/sample_files/syck-0.55/config/install-sh - test/sample_files/syck-0.55/config/README - test/sample_files/syck-0.55/config/missing - test/sample_files/syck-0.55/config/depcomp - test/sample_files/syck-0.55/lib - test/sample_files/syck-0.55/lib/token.re - test/sample_files/syck-0.55/lib/gram.c - test/sample_files/syck-0.55/lib/bytecode.c - test/sample_files/syck-0.55/lib/gram.output - test/sample_files/syck-0.55/lib/yaml2byte.c - test/sample_files/syck-0.55/lib/gram.h - test/sample_files/syck-0.55/lib/implicit.c - test/sample_files/syck-0.55/lib/Makefile.am - test/sample_files/syck-0.55/lib/Makefile.in - test/sample_files/syck-0.55/lib/syck.c - test/sample_files/syck-0.55/lib/emitter.c - test/sample_files/syck-0.55/lib/token.c - test/sample_files/syck-0.55/lib/yamlbyte.h - test/sample_files/syck-0.55/lib/syck_st.c - test/sample_files/syck-0.55/lib/Makefile - 
test/sample_files/syck-0.55/lib/handler.c - test/sample_files/syck-0.55/lib/implicit.re - test/sample_files/syck-0.55/lib/bytecode.re - test/sample_files/syck-0.55/lib/node.c - test/sample_files/syck-0.55/lib/syck.h - test/sample_files/syck-0.55/lib/gram.y - test/sample_files/syck-0.55/lib/syck_st.h - test/sample_files/syck-0.55/CHANGELOG - test/sample_files/syck-0.55/configure.in - test/sample_files/syck-0.55/configure - test/sample_files/syck-0.55/TODO - test/sample_files/syck-0.55/config.h - test/sample_files/syck-0.55/config.status - test/sample_files/syck-0.55/README - test/sample_files/syck-0.55/aclocal.m4 - test/sample_files/syck-0.55/RELEASE - test/sample_files/syck-0.55/Makefile.am - test/sample_files/syck-0.55/config.h.in - test/sample_files/syck-0.55/README.BYTECODE - test/sample_files/syck-0.55/Makefile.in - test/sample_files/syck-0.55/bootstrap - test/sample_files/syck-0.55/README.EXT - test/sample_files/syck-0.55/Makefile - test/sample_files/syck-0.55/stamp-h1 - test/sample_files/syck-0.55/COPYING - test/sample_files/libtrivial - test/sample_files/libtrivial/ext - test/sample_files/libtrivial/ext/libtrivial.c - test/sample_files/libtrivial/ext/extconf.rb - test/sample_files/cpp_bang - test/sample_files/cpp_bang/ext - test/sample_files/cpp_bang/ext/bang.cpp - test/sample_files/cpp_bang/ext/mkrf_config.rb - test/fixtures - test/fixtures/down_a_directory - test/fixtures/down_a_directory/header_down_a_directory.h - test/fixtures/stdmkrf.h - test/fixtures/some_binary - test/unit - test/unit/test_generator.rb - test/unit/test_availability.rb - test/abstract_unit.rb test_files: [] rdoc_options: - --main - README - --title - mkrf extra_rdoc_files: - README - MIT-LICENSE - CHANGELOG executables: [] extensions: [] requirements: - rake dependencies: []
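
As a worked example of the helpers defined in lib/mkrf/rakehelper.rb above, here is a minimal sketch of a project's top-level Rakefile. It assumes a hypothetical extension living in PROJECT_ROOT/ext/mything (containing mything.c and mkrf_conf.rb); the name "mything" and the file layout are illustrative only and are not part of this package.

  # Hypothetical top-level Rakefile (not shipped with mkrf)
  require 'rubygems'
  require 'rake'
  require 'rake/clean'
  require 'rake/testtask'
  require 'rake/rdoctask'
  require 'mkrf/rakehelper'

  # Unit tests in test/test*.rb
  setup_tests

  # Remove the generated sub-Rakefile in addition to the default clean patterns
  setup_clean ['ext/mything/Rakefile']

  # RDoc for the README and the Ruby sources
  setup_rdoc ['README', 'lib/**/*.rb']

  # Generate ext/mything/Rakefile from mkrf_conf.rb, build the extension,
  # and copy the resulting shared object into lib/
  setup_extension('mything', 'mything')

  task :default => [:mything, :test]

With this in place, `rake mything` builds just the extension (setup_extension defines a task named after the extension), while plain `rake` builds it and then runs the tests.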