ruby-json-2.1.0+dfsg.orig/0000755000175000017500000000000013113111601014662 5ustar boutilboutilruby-json-2.1.0+dfsg.orig/data/0000755000175000017500000000000013113111601015573 5ustar boutilboutilruby-json-2.1.0+dfsg.orig/data/prototype.js0000644000175000017500000036235013113111601020207 0ustar boutilboutil/* Prototype JavaScript framework, version 1.6.0 * (c) 2005-2007 Sam Stephenson * * Prototype is freely distributable under the terms of an MIT-style license. * For details, see the Prototype web site: http://www.prototypejs.org/ * *--------------------------------------------------------------------------*/ var Prototype = { Version: '1.6.0', Browser: { IE: !!(window.attachEvent && !window.opera), Opera: !!window.opera, WebKit: navigator.userAgent.indexOf('AppleWebKit/') > -1, Gecko: navigator.userAgent.indexOf('Gecko') > -1 && navigator.userAgent.indexOf('KHTML') == -1, MobileSafari: !!navigator.userAgent.match(/Apple.*Mobile.*Safari/) }, BrowserFeatures: { XPath: !!document.evaluate, ElementExtensions: !!window.HTMLElement, SpecificElementExtensions: document.createElement('div').__proto__ && document.createElement('div').__proto__ !== document.createElement('form').__proto__ }, ScriptFragment: ']*>([\\S\\s]*?)<\/script>', JSONFilter: /^\/\*-secure-([\s\S]*)\*\/\s*$/, emptyFunction: function() { }, K: function(x) { return x } }; if (Prototype.Browser.MobileSafari) Prototype.BrowserFeatures.SpecificElementExtensions = false; if (Prototype.Browser.WebKit) Prototype.BrowserFeatures.XPath = false; /* Based on Alex Arnell's inheritance implementation. 
*/ var Class = { create: function() { var parent = null, properties = $A(arguments); if (Object.isFunction(properties[0])) parent = properties.shift(); function klass() { this.initialize.apply(this, arguments); } Object.extend(klass, Class.Methods); klass.superclass = parent; klass.subclasses = []; if (parent) { var subclass = function() { }; subclass.prototype = parent.prototype; klass.prototype = new subclass; parent.subclasses.push(klass); } for (var i = 0; i < properties.length; i++) klass.addMethods(properties[i]); if (!klass.prototype.initialize) klass.prototype.initialize = Prototype.emptyFunction; klass.prototype.constructor = klass; return klass; } }; Class.Methods = { addMethods: function(source) { var ancestor = this.superclass && this.superclass.prototype; var properties = Object.keys(source); if (!Object.keys({ toString: true }).length) properties.push("toString", "valueOf"); for (var i = 0, length = properties.length; i < length; i++) { var property = properties[i], value = source[property]; if (ancestor && Object.isFunction(value) && value.argumentNames().first() == "$super") { var method = value, value = Object.extend((function(m) { return function() { return ancestor[m].apply(this, arguments) }; })(property).wrap(method), { valueOf: function() { return method }, toString: function() { return method.toString() } }); } this.prototype[property] = value; } return this; } }; var Abstract = { }; Object.extend = function(destination, source) { for (var property in source) destination[property] = source[property]; return destination; }; Object.extend(Object, { inspect: function(object) { try { if (object === undefined) return 'undefined'; if (object === null) return 'null'; return object.inspect ? 
object.inspect() : object.toString(); } catch (e) { if (e instanceof RangeError) return '...'; throw e; } }, toJSON: function(object) { var type = typeof object; switch (type) { case 'undefined': case 'function': case 'unknown': return; case 'boolean': return object.toString(); } if (object === null) return 'null'; if (object.toJSON) return object.toJSON(); if (Object.isElement(object)) return; var results = []; for (var property in object) { var value = Object.toJSON(object[property]); if (value !== undefined) results.push(property.toJSON() + ': ' + value); } return '{' + results.join(', ') + '}'; }, toQueryString: function(object) { return $H(object).toQueryString(); }, toHTML: function(object) { return object && object.toHTML ? object.toHTML() : String.interpret(object); }, keys: function(object) { var keys = []; for (var property in object) keys.push(property); return keys; }, values: function(object) { var values = []; for (var property in object) values.push(object[property]); return values; }, clone: function(object) { return Object.extend({ }, object); }, isElement: function(object) { return object && object.nodeType == 1; }, isArray: function(object) { return object && object.constructor === Array; }, isHash: function(object) { return object instanceof Hash; }, isFunction: function(object) { return typeof object == "function"; }, isString: function(object) { return typeof object == "string"; }, isNumber: function(object) { return typeof object == "number"; }, isUndefined: function(object) { return typeof object == "undefined"; } }); Object.extend(Function.prototype, { argumentNames: function() { var names = this.toString().match(/^[\s\(]*function[^(]*\((.*?)\)/)[1].split(",").invoke("strip"); return names.length == 1 && !names[0] ? 
[] : names; }, bind: function() { if (arguments.length < 2 && arguments[0] === undefined) return this; var __method = this, args = $A(arguments), object = args.shift(); return function() { return __method.apply(object, args.concat($A(arguments))); } }, bindAsEventListener: function() { var __method = this, args = $A(arguments), object = args.shift(); return function(event) { return __method.apply(object, [event || window.event].concat(args)); } }, curry: function() { if (!arguments.length) return this; var __method = this, args = $A(arguments); return function() { return __method.apply(this, args.concat($A(arguments))); } }, delay: function() { var __method = this, args = $A(arguments), timeout = args.shift() * 1000; return window.setTimeout(function() { return __method.apply(__method, args); }, timeout); }, wrap: function(wrapper) { var __method = this; return function() { return wrapper.apply(this, [__method.bind(this)].concat($A(arguments))); } }, methodize: function() { if (this._methodized) return this._methodized; var __method = this; return this._methodized = function() { return __method.apply(null, [this].concat($A(arguments))); }; } }); Function.prototype.defer = Function.prototype.delay.curry(0.01); Date.prototype.toJSON = function() { return '"' + this.getUTCFullYear() + '-' + (this.getUTCMonth() + 1).toPaddedString(2) + '-' + this.getUTCDate().toPaddedString(2) + 'T' + this.getUTCHours().toPaddedString(2) + ':' + this.getUTCMinutes().toPaddedString(2) + ':' + this.getUTCSeconds().toPaddedString(2) + 'Z"'; }; var Try = { these: function() { var returnValue; for (var i = 0, length = arguments.length; i < length; i++) { var lambda = arguments[i]; try { returnValue = lambda(); break; } catch (e) { } } return returnValue; } }; RegExp.prototype.match = RegExp.prototype.test; RegExp.escape = function(str) { return String(str).replace(/([.*+?^=!:${}()|[\]\/\\])/g, '\\$1'); }; /*--------------------------------------------------------------------------*/ var 
PeriodicalExecuter = Class.create({ initialize: function(callback, frequency) { this.callback = callback; this.frequency = frequency; this.currentlyExecuting = false; this.registerCallback(); }, registerCallback: function() { this.timer = setInterval(this.onTimerEvent.bind(this), this.frequency * 1000); }, execute: function() { this.callback(this); }, stop: function() { if (!this.timer) return; clearInterval(this.timer); this.timer = null; }, onTimerEvent: function() { if (!this.currentlyExecuting) { try { this.currentlyExecuting = true; this.execute(); } finally { this.currentlyExecuting = false; } } } }); Object.extend(String, { interpret: function(value) { return value == null ? '' : String(value); }, specialChar: { '\b': '\\b', '\t': '\\t', '\n': '\\n', '\f': '\\f', '\r': '\\r', '\\': '\\\\' } }); Object.extend(String.prototype, { gsub: function(pattern, replacement) { var result = '', source = this, match; replacement = arguments.callee.prepareReplacement(replacement); while (source.length > 0) { if (match = source.match(pattern)) { result += source.slice(0, match.index); result += String.interpret(replacement(match)); source = source.slice(match.index + match[0].length); } else { result += source, source = ''; } } return result; }, sub: function(pattern, replacement, count) { replacement = this.gsub.prepareReplacement(replacement); count = count === undefined ? 1 : count; return this.gsub(pattern, function(match) { if (--count < 0) return match[0]; return replacement(match); }); }, scan: function(pattern, iterator) { this.gsub(pattern, iterator); return String(this); }, truncate: function(length, truncation) { length = length || 30; truncation = truncation === undefined ? '...' : truncation; return this.length > length ? 
this.slice(0, length - truncation.length) + truncation : String(this); }, strip: function() { return this.replace(/^\s+/, '').replace(/\s+$/, ''); }, stripTags: function() { return this.replace(/<\/?[^>]+>/gi, ''); }, stripScripts: function() { return this.replace(new RegExp(Prototype.ScriptFragment, 'img'), ''); }, extractScripts: function() { var matchAll = new RegExp(Prototype.ScriptFragment, 'img'); var matchOne = new RegExp(Prototype.ScriptFragment, 'im'); return (this.match(matchAll) || []).map(function(scriptTag) { return (scriptTag.match(matchOne) || ['', ''])[1]; }); }, evalScripts: function() { return this.extractScripts().map(function(script) { return eval(script) }); }, escapeHTML: function() { var self = arguments.callee; self.text.data = this; return self.div.innerHTML; }, unescapeHTML: function() { var div = new Element('div'); div.innerHTML = this.stripTags(); return div.childNodes[0] ? (div.childNodes.length > 1 ? $A(div.childNodes).inject('', function(memo, node) { return memo+node.nodeValue }) : div.childNodes[0].nodeValue) : ''; }, toQueryParams: function(separator) { var match = this.strip().match(/([^?#]*)(#.*)?$/); if (!match) return { }; return match[1].split(separator || '&').inject({ }, function(hash, pair) { if ((pair = pair.split('='))[0]) { var key = decodeURIComponent(pair.shift()); var value = pair.length > 1 ? pair.join('=') : pair[0]; if (value != undefined) value = decodeURIComponent(value); if (key in hash) { if (!Object.isArray(hash[key])) hash[key] = [hash[key]]; hash[key].push(value); } else hash[key] = value; } return hash; }); }, toArray: function() { return this.split(''); }, succ: function() { return this.slice(0, this.length - 1) + String.fromCharCode(this.charCodeAt(this.length - 1) + 1); }, times: function(count) { return count < 1 ? '' : new Array(count + 1).join(this); }, camelize: function() { var parts = this.split('-'), len = parts.length; if (len == 1) return parts[0]; var camelized = this.charAt(0) == '-' ? 
parts[0].charAt(0).toUpperCase() + parts[0].substring(1) : parts[0]; for (var i = 1; i < len; i++) camelized += parts[i].charAt(0).toUpperCase() + parts[i].substring(1); return camelized; }, capitalize: function() { return this.charAt(0).toUpperCase() + this.substring(1).toLowerCase(); }, underscore: function() { return this.gsub(/::/, '/').gsub(/([A-Z]+)([A-Z][a-z])/,'#{1}_#{2}').gsub(/([a-z\d])([A-Z])/,'#{1}_#{2}').gsub(/-/,'_').toLowerCase(); }, dasherize: function() { return this.gsub(/_/,'-'); }, inspect: function(useDoubleQuotes) { var escapedString = this.gsub(/[\x00-\x1f\\]/, function(match) { var character = String.specialChar[match[0]]; return character ? character : '\\u00' + match[0].charCodeAt().toPaddedString(2, 16); }); if (useDoubleQuotes) return '"' + escapedString.replace(/"/g, '\\"') + '"'; return "'" + escapedString.replace(/'/g, '\\\'') + "'"; }, toJSON: function() { return this.inspect(true); }, unfilterJSON: function(filter) { return this.sub(filter || Prototype.JSONFilter, '#{1}'); }, isJSON: function() { var str = this.replace(/\\./g, '@').replace(/"[^"\\\n\r]*"/g, ''); return (/^[,:{}\[\]0-9.\-+Eaeflnr-u \n\r\t]*$/).test(str); }, evalJSON: function(sanitize) { var json = this.unfilterJSON(); try { if (!sanitize || json.isJSON()) return eval('(' + json + ')'); } catch (e) { } throw new SyntaxError('Badly formed JSON string: ' + this.inspect()); }, include: function(pattern) { return this.indexOf(pattern) > -1; }, startsWith: function(pattern) { return this.indexOf(pattern) === 0; }, endsWith: function(pattern) { var d = this.length - pattern.length; return d >= 0 && this.lastIndexOf(pattern) === d; }, empty: function() { return this == ''; }, blank: function() { return /^\s*$/.test(this); }, interpolate: function(object, pattern) { return new Template(this, pattern).evaluate(object); } }); if (Prototype.Browser.WebKit || Prototype.Browser.IE) Object.extend(String.prototype, { escapeHTML: function() { return 
this.replace(/&/g,'&').replace(//g,'>'); }, unescapeHTML: function() { return this.replace(/&/g,'&').replace(/</g,'<').replace(/>/g,'>'); } }); String.prototype.gsub.prepareReplacement = function(replacement) { if (Object.isFunction(replacement)) return replacement; var template = new Template(replacement); return function(match) { return template.evaluate(match) }; }; String.prototype.parseQuery = String.prototype.toQueryParams; Object.extend(String.prototype.escapeHTML, { div: document.createElement('div'), text: document.createTextNode('') }); with (String.prototype.escapeHTML) div.appendChild(text); var Template = Class.create({ initialize: function(template, pattern) { this.template = template.toString(); this.pattern = pattern || Template.Pattern; }, evaluate: function(object) { if (Object.isFunction(object.toTemplateReplacements)) object = object.toTemplateReplacements(); return this.template.gsub(this.pattern, function(match) { if (object == null) return ''; var before = match[1] || ''; if (before == '\\') return match[2]; var ctx = object, expr = match[3]; var pattern = /^([^.[]+|\[((?:.*?[^\\])?)\])(\.|\[|$)/, match = pattern.exec(expr); if (match == null) return before; while (match != null) { var comp = match[1].startsWith('[') ? match[2].gsub('\\\\]', ']') : match[1]; ctx = ctx[comp]; if (null == ctx || '' == match[3]) break; expr = expr.substring('[' == match[3] ? match[1].length : match[0].length); match = pattern.exec(expr); } return before + String.interpret(ctx); }.bind(this)); } }); Template.Pattern = /(^|.|\r|\n)(#\{(.*?)\})/; var $break = { }; var Enumerable = { each: function(iterator, context) { var index = 0; iterator = iterator.bind(context); try { this._each(function(value) { iterator(value, index++); }); } catch (e) { if (e != $break) throw e; } return this; }, eachSlice: function(number, iterator, context) { iterator = iterator ? 
iterator.bind(context) : Prototype.K; var index = -number, slices = [], array = this.toArray(); while ((index += number) < array.length) slices.push(array.slice(index, index+number)); return slices.collect(iterator, context); }, all: function(iterator, context) { iterator = iterator ? iterator.bind(context) : Prototype.K; var result = true; this.each(function(value, index) { result = result && !!iterator(value, index); if (!result) throw $break; }); return result; }, any: function(iterator, context) { iterator = iterator ? iterator.bind(context) : Prototype.K; var result = false; this.each(function(value, index) { if (result = !!iterator(value, index)) throw $break; }); return result; }, collect: function(iterator, context) { iterator = iterator ? iterator.bind(context) : Prototype.K; var results = []; this.each(function(value, index) { results.push(iterator(value, index)); }); return results; }, detect: function(iterator, context) { iterator = iterator.bind(context); var result; this.each(function(value, index) { if (iterator(value, index)) { result = value; throw $break; } }); return result; }, findAll: function(iterator, context) { iterator = iterator.bind(context); var results = []; this.each(function(value, index) { if (iterator(value, index)) results.push(value); }); return results; }, grep: function(filter, iterator, context) { iterator = iterator ? iterator.bind(context) : Prototype.K; var results = []; if (Object.isString(filter)) filter = new RegExp(filter); this.each(function(value, index) { if (filter.match(value)) results.push(iterator(value, index)); }); return results; }, include: function(object) { if (Object.isFunction(this.indexOf)) if (this.indexOf(object) != -1) return true; var found = false; this.each(function(value) { if (value == object) { found = true; throw $break; } }); return found; }, inGroupsOf: function(number, fillWith) { fillWith = fillWith === undefined ? 
null : fillWith; return this.eachSlice(number, function(slice) { while(slice.length < number) slice.push(fillWith); return slice; }); }, inject: function(memo, iterator, context) { iterator = iterator.bind(context); this.each(function(value, index) { memo = iterator(memo, value, index); }); return memo; }, invoke: function(method) { var args = $A(arguments).slice(1); return this.map(function(value) { return value[method].apply(value, args); }); }, max: function(iterator, context) { iterator = iterator ? iterator.bind(context) : Prototype.K; var result; this.each(function(value, index) { value = iterator(value, index); if (result == undefined || value >= result) result = value; }); return result; }, min: function(iterator, context) { iterator = iterator ? iterator.bind(context) : Prototype.K; var result; this.each(function(value, index) { value = iterator(value, index); if (result == undefined || value < result) result = value; }); return result; }, partition: function(iterator, context) { iterator = iterator ? iterator.bind(context) : Prototype.K; var trues = [], falses = []; this.each(function(value, index) { (iterator(value, index) ? trues : falses).push(value); }); return [trues, falses]; }, pluck: function(property) { var results = []; this.each(function(value) { results.push(value[property]); }); return results; }, reject: function(iterator, context) { iterator = iterator.bind(context); var results = []; this.each(function(value, index) { if (!iterator(value, index)) results.push(value); }); return results; }, sortBy: function(iterator, context) { iterator = iterator.bind(context); return this.map(function(value, index) { return {value: value, criteria: iterator(value, index)}; }).sort(function(left, right) { var a = left.criteria, b = right.criteria; return a < b ? -1 : a > b ? 
1 : 0; }).pluck('value'); }, toArray: function() { return this.map(); }, zip: function() { var iterator = Prototype.K, args = $A(arguments); if (Object.isFunction(args.last())) iterator = args.pop(); var collections = [this].concat(args).map($A); return this.map(function(value, index) { return iterator(collections.pluck(index)); }); }, size: function() { return this.toArray().length; }, inspect: function() { return '#'; } }; Object.extend(Enumerable, { map: Enumerable.collect, find: Enumerable.detect, select: Enumerable.findAll, filter: Enumerable.findAll, member: Enumerable.include, entries: Enumerable.toArray, every: Enumerable.all, some: Enumerable.any }); function $A(iterable) { if (!iterable) return []; if (iterable.toArray) return iterable.toArray(); var length = iterable.length, results = new Array(length); while (length--) results[length] = iterable[length]; return results; } if (Prototype.Browser.WebKit) { function $A(iterable) { if (!iterable) return []; if (!(Object.isFunction(iterable) && iterable == '[object NodeList]') && iterable.toArray) return iterable.toArray(); var length = iterable.length, results = new Array(length); while (length--) results[length] = iterable[length]; return results; } } Array.from = $A; Object.extend(Array.prototype, Enumerable); if (!Array.prototype._reverse) Array.prototype._reverse = Array.prototype.reverse; Object.extend(Array.prototype, { _each: function(iterator) { for (var i = 0, length = this.length; i < length; i++) iterator(this[i]); }, clear: function() { this.length = 0; return this; }, first: function() { return this[0]; }, last: function() { return this[this.length - 1]; }, compact: function() { return this.select(function(value) { return value != null; }); }, flatten: function() { return this.inject([], function(array, value) { return array.concat(Object.isArray(value) ? 
value.flatten() : [value]); }); }, without: function() { var values = $A(arguments); return this.select(function(value) { return !values.include(value); }); }, reverse: function(inline) { return (inline !== false ? this : this.toArray())._reverse(); }, reduce: function() { return this.length > 1 ? this : this[0]; }, uniq: function(sorted) { return this.inject([], function(array, value, index) { if (0 == index || (sorted ? array.last() != value : !array.include(value))) array.push(value); return array; }); }, intersect: function(array) { return this.uniq().findAll(function(item) { return array.detect(function(value) { return item === value }); }); }, clone: function() { return [].concat(this); }, size: function() { return this.length; }, inspect: function() { return '[' + this.map(Object.inspect).join(', ') + ']'; }, toJSON: function() { var results = []; this.each(function(object) { var value = Object.toJSON(object); if (value !== undefined) results.push(value); }); return '[' + results.join(', ') + ']'; } }); // use native browser JS 1.6 implementation if available if (Object.isFunction(Array.prototype.forEach)) Array.prototype._each = Array.prototype.forEach; if (!Array.prototype.indexOf) Array.prototype.indexOf = function(item, i) { i || (i = 0); var length = this.length; if (i < 0) i = length + i; for (; i < length; i++) if (this[i] === item) return i; return -1; }; if (!Array.prototype.lastIndexOf) Array.prototype.lastIndexOf = function(item, i) { i = isNaN(i) ? this.length : (i < 0 ? this.length + i : i) + 1; var n = this.slice(0, i).reverse().indexOf(item); return (n < 0) ? n : i - n - 1; }; Array.prototype.toArray = Array.prototype.clone; function $w(string) { if (!Object.isString(string)) return []; string = string.strip(); return string ? 
string.split(/\s+/) : []; } if (Prototype.Browser.Opera){ Array.prototype.concat = function() { var array = []; for (var i = 0, length = this.length; i < length; i++) array.push(this[i]); for (var i = 0, length = arguments.length; i < length; i++) { if (Object.isArray(arguments[i])) { for (var j = 0, arrayLength = arguments[i].length; j < arrayLength; j++) array.push(arguments[i][j]); } else { array.push(arguments[i]); } } return array; }; } Object.extend(Number.prototype, { toColorPart: function() { return this.toPaddedString(2, 16); }, succ: function() { return this + 1; }, times: function(iterator) { $R(0, this, true).each(iterator); return this; }, toPaddedString: function(length, radix) { var string = this.toString(radix || 10); return '0'.times(length - string.length) + string; }, toJSON: function() { return isFinite(this) ? this.toString() : 'null'; } }); $w('abs round ceil floor').each(function(method){ Number.prototype[method] = Math[method].methodize(); }); function $H(object) { return new Hash(object); }; var Hash = Class.create(Enumerable, (function() { if (function() { var i = 0, Test = function(value) { this.key = value }; Test.prototype.key = 'foo'; for (var property in new Test('bar')) i++; return i > 1; }()) { function each(iterator) { var cache = []; for (var key in this._object) { var value = this._object[key]; if (cache.include(key)) continue; cache.push(key); var pair = [key, value]; pair.key = key; pair.value = value; iterator(pair); } } } else { function each(iterator) { for (var key in this._object) { var value = this._object[key], pair = [key, value]; pair.key = key; pair.value = value; iterator(pair); } } } function toQueryPair(key, value) { if (Object.isUndefined(value)) return key; return key + '=' + encodeURIComponent(String.interpret(value)); } return { initialize: function(object) { this._object = Object.isHash(object) ? 
object.toObject() : Object.clone(object); }, _each: each, set: function(key, value) { return this._object[key] = value; }, get: function(key) { return this._object[key]; }, unset: function(key) { var value = this._object[key]; delete this._object[key]; return value; }, toObject: function() { return Object.clone(this._object); }, keys: function() { return this.pluck('key'); }, values: function() { return this.pluck('value'); }, index: function(value) { var match = this.detect(function(pair) { return pair.value === value; }); return match && match.key; }, merge: function(object) { return this.clone().update(object); }, update: function(object) { return new Hash(object).inject(this, function(result, pair) { result.set(pair.key, pair.value); return result; }); }, toQueryString: function() { return this.map(function(pair) { var key = encodeURIComponent(pair.key), values = pair.value; if (values && typeof values == 'object') { if (Object.isArray(values)) return values.map(toQueryPair.curry(key)).join('&'); } return toQueryPair(key, values); }).join('&'); }, inspect: function() { return '#'; }, toJSON: function() { return Object.toJSON(this.toObject()); }, clone: function() { return new Hash(this); } } })()); Hash.prototype.toTemplateReplacements = Hash.prototype.toObject; Hash.from = $H; var ObjectRange = Class.create(Enumerable, { initialize: function(start, end, exclusive) { this.start = start; this.end = end; this.exclusive = exclusive; }, _each: function(iterator) { var value = this.start; while (this.include(value)) { iterator(value); value = value.succ(); } }, include: function(value) { if (value < this.start) return false; if (this.exclusive) return value < this.end; return value <= this.end; } }); var $R = function(start, end, exclusive) { return new ObjectRange(start, end, exclusive); }; var Ajax = { getTransport: function() { return Try.these( function() {return new XMLHttpRequest()}, function() {return new ActiveXObject('Msxml2.XMLHTTP')}, function() {return 
new ActiveXObject('Microsoft.XMLHTTP')} ) || false; }, activeRequestCount: 0 }; Ajax.Responders = { responders: [], _each: function(iterator) { this.responders._each(iterator); }, register: function(responder) { if (!this.include(responder)) this.responders.push(responder); }, unregister: function(responder) { this.responders = this.responders.without(responder); }, dispatch: function(callback, request, transport, json) { this.each(function(responder) { if (Object.isFunction(responder[callback])) { try { responder[callback].apply(responder, [request, transport, json]); } catch (e) { } } }); } }; Object.extend(Ajax.Responders, Enumerable); Ajax.Responders.register({ onCreate: function() { Ajax.activeRequestCount++ }, onComplete: function() { Ajax.activeRequestCount-- } }); Ajax.Base = Class.create({ initialize: function(options) { this.options = { method: 'post', asynchronous: true, contentType: 'application/x-www-form-urlencoded', encoding: 'UTF-8', parameters: '', evalJSON: true, evalJS: true }; Object.extend(this.options, options || { }); this.options.method = this.options.method.toLowerCase(); if (Object.isString(this.options.parameters)) this.options.parameters = this.options.parameters.toQueryParams(); } }); Ajax.Request = Class.create(Ajax.Base, { _complete: false, initialize: function($super, url, options) { $super(options); this.transport = Ajax.getTransport(); this.request(url); }, request: function(url) { this.url = url; this.method = this.options.method; var params = Object.clone(this.options.parameters); if (!['get', 'post'].include(this.method)) { // simulate other verbs over post params['_method'] = this.method; this.method = 'post'; } this.parameters = params; if (params = Object.toQueryString(params)) { // when GET, append parameters to URL if (this.method == 'get') this.url += (this.url.include('?') ? 
'&' : '?') + params; else if (/Konqueror|Safari|KHTML/.test(navigator.userAgent)) params += '&_='; } try { var response = new Ajax.Response(this); if (this.options.onCreate) this.options.onCreate(response); Ajax.Responders.dispatch('onCreate', this, response); this.transport.open(this.method.toUpperCase(), this.url, this.options.asynchronous); if (this.options.asynchronous) this.respondToReadyState.bind(this).defer(1); this.transport.onreadystatechange = this.onStateChange.bind(this); this.setRequestHeaders(); this.body = this.method == 'post' ? (this.options.postBody || params) : null; this.transport.send(this.body); /* Force Firefox to handle ready state 4 for synchronous requests */ if (!this.options.asynchronous && this.transport.overrideMimeType) this.onStateChange(); } catch (e) { this.dispatchException(e); } }, onStateChange: function() { var readyState = this.transport.readyState; if (readyState > 1 && !((readyState == 4) && this._complete)) this.respondToReadyState(this.transport.readyState); }, setRequestHeaders: function() { var headers = { 'X-Requested-With': 'XMLHttpRequest', 'X-Prototype-Version': Prototype.Version, 'Accept': 'text/javascript, text/html, application/xml, text/xml, */*' }; if (this.method == 'post') { headers['Content-type'] = this.options.contentType + (this.options.encoding ? '; charset=' + this.options.encoding : ''); /* Force "Connection: close" for older Mozilla browsers to work * around a bug where XMLHttpRequest sends an incorrect * Content-length header. See Mozilla Bugzilla #246651. 
*/ if (this.transport.overrideMimeType && (navigator.userAgent.match(/Gecko\/(\d{4})/) || [0,2005])[1] < 2005) headers['Connection'] = 'close'; } // user-defined headers if (typeof this.options.requestHeaders == 'object') { var extras = this.options.requestHeaders; if (Object.isFunction(extras.push)) for (var i = 0, length = extras.length; i < length; i += 2) headers[extras[i]] = extras[i+1]; else $H(extras).each(function(pair) { headers[pair.key] = pair.value }); } for (var name in headers) this.transport.setRequestHeader(name, headers[name]); }, success: function() { var status = this.getStatus(); return !status || (status >= 200 && status < 300); }, getStatus: function() { try { return this.transport.status || 0; } catch (e) { return 0 } }, respondToReadyState: function(readyState) { var state = Ajax.Request.Events[readyState], response = new Ajax.Response(this); if (state == 'Complete') { try { this._complete = true; (this.options['on' + response.status] || this.options['on' + (this.success() ? 
'Success' : 'Failure')] || Prototype.emptyFunction)(response, response.headerJSON); } catch (e) { this.dispatchException(e); } var contentType = response.getHeader('Content-type'); if (this.options.evalJS == 'force' || (this.options.evalJS && contentType && contentType.match(/^\s*(text|application)\/(x-)?(java|ecma)script(;.*)?\s*$/i))) this.evalResponse(); } try { (this.options['on' + state] || Prototype.emptyFunction)(response, response.headerJSON); Ajax.Responders.dispatch('on' + state, this, response, response.headerJSON); } catch (e) { this.dispatchException(e); } if (state == 'Complete') { // avoid memory leak in MSIE: clean up this.transport.onreadystatechange = Prototype.emptyFunction; } }, getHeader: function(name) { try { return this.transport.getResponseHeader(name); } catch (e) { return null } }, evalResponse: function() { try { return eval((this.transport.responseText || '').unfilterJSON()); } catch (e) { this.dispatchException(e); } }, dispatchException: function(exception) { (this.options.onException || Prototype.emptyFunction)(this, exception); Ajax.Responders.dispatch('onException', this, exception); } }); Ajax.Request.Events = ['Uninitialized', 'Loading', 'Loaded', 'Interactive', 'Complete']; Ajax.Response = Class.create({ initialize: function(request){ this.request = request; var transport = this.transport = request.transport, readyState = this.readyState = transport.readyState; if((readyState > 2 && !Prototype.Browser.IE) || readyState == 4) { this.status = this.getStatus(); this.statusText = this.getStatusText(); this.responseText = String.interpret(transport.responseText); this.headerJSON = this._getHeaderJSON(); } if(readyState == 4) { var xml = transport.responseXML; this.responseXML = xml === undefined ? 
null : xml; this.responseJSON = this._getResponseJSON(); } }, status: 0, statusText: '', getStatus: Ajax.Request.prototype.getStatus, getStatusText: function() { try { return this.transport.statusText || ''; } catch (e) { return '' } }, getHeader: Ajax.Request.prototype.getHeader, getAllHeaders: function() { try { return this.getAllResponseHeaders(); } catch (e) { return null } }, getResponseHeader: function(name) { return this.transport.getResponseHeader(name); }, getAllResponseHeaders: function() { return this.transport.getAllResponseHeaders(); }, _getHeaderJSON: function() { var json = this.getHeader('X-JSON'); if (!json) return null; json = decodeURIComponent(escape(json)); try { return json.evalJSON(this.request.options.sanitizeJSON); } catch (e) { this.request.dispatchException(e); } }, _getResponseJSON: function() { var options = this.request.options; if (!options.evalJSON || (options.evalJSON != 'force' && !(this.getHeader('Content-type') || '').include('application/json'))) return null; try { return this.transport.responseText.evalJSON(options.sanitizeJSON); } catch (e) { this.request.dispatchException(e); } } }); Ajax.Updater = Class.create(Ajax.Request, { initialize: function($super, container, url, options) { this.container = { success: (container.success || container), failure: (container.failure || (container.success ? null : container)) }; options = options || { }; var onComplete = options.onComplete; options.onComplete = (function(response, param) { this.updateContent(response.responseText); if (Object.isFunction(onComplete)) onComplete(response, param); }).bind(this); $super(url, options); }, updateContent: function(responseText) { var receiver = this.container[this.success() ? 
'success' : 'failure'], options = this.options; if (!options.evalScripts) responseText = responseText.stripScripts(); if (receiver = $(receiver)) { if (options.insertion) { if (Object.isString(options.insertion)) { var insertion = { }; insertion[options.insertion] = responseText; receiver.insert(insertion); } else options.insertion(receiver, responseText); } else receiver.update(responseText); } if (this.success()) { if (this.onComplete) this.onComplete.bind(this).defer(); } } }); Ajax.PeriodicalUpdater = Class.create(Ajax.Base, { initialize: function($super, container, url, options) { $super(options); this.onComplete = this.options.onComplete; this.frequency = (this.options.frequency || 2); this.decay = (this.options.decay || 1); this.updater = { }; this.container = container; this.url = url; this.start(); }, start: function() { this.options.onComplete = this.updateComplete.bind(this); this.onTimerEvent(); }, stop: function() { this.updater.options.onComplete = undefined; clearTimeout(this.timer); (this.onComplete || Prototype.emptyFunction).apply(this, arguments); }, updateComplete: function(response) { if (this.options.decay) { this.decay = (response.responseText == this.lastText ? 
this.decay * this.options.decay : 1); this.lastText = response.responseText; } this.timer = this.onTimerEvent.bind(this).delay(this.decay * this.frequency); }, onTimerEvent: function() { this.updater = new Ajax.Updater(this.container, this.url, this.options); } }); function $(element) { if (arguments.length > 1) { for (var i = 0, elements = [], length = arguments.length; i < length; i++) elements.push($(arguments[i])); return elements; } if (Object.isString(element)) element = document.getElementById(element); return Element.extend(element); } if (Prototype.BrowserFeatures.XPath) { document._getElementsByXPath = function(expression, parentElement) { var results = []; var query = document.evaluate(expression, $(parentElement) || document, null, XPathResult.ORDERED_NODE_SNAPSHOT_TYPE, null); for (var i = 0, length = query.snapshotLength; i < length; i++) results.push(Element.extend(query.snapshotItem(i))); return results; }; } /*--------------------------------------------------------------------------*/ if (!window.Node) var Node = { }; if (!Node.ELEMENT_NODE) { // DOM level 2 ECMAScript Language Binding Object.extend(Node, { ELEMENT_NODE: 1, ATTRIBUTE_NODE: 2, TEXT_NODE: 3, CDATA_SECTION_NODE: 4, ENTITY_REFERENCE_NODE: 5, ENTITY_NODE: 6, PROCESSING_INSTRUCTION_NODE: 7, COMMENT_NODE: 8, DOCUMENT_NODE: 9, DOCUMENT_TYPE_NODE: 10, DOCUMENT_FRAGMENT_NODE: 11, NOTATION_NODE: 12 }); } (function() { var element = this.Element; this.Element = function(tagName, attributes) { attributes = attributes || { }; tagName = tagName.toLowerCase(); var cache = Element.cache; if (Prototype.Browser.IE && attributes.name) { tagName = '<' + tagName + ' name="' + attributes.name + '">'; delete attributes.name; return Element.writeAttribute(document.createElement(tagName), attributes); } if (!cache[tagName]) cache[tagName] = Element.extend(document.createElement(tagName)); return Element.writeAttribute(cache[tagName].cloneNode(false), attributes); }; Object.extend(this.Element, element || { 
}); }).call(window); Element.cache = { }; Element.Methods = { visible: function(element) { return $(element).style.display != 'none'; }, toggle: function(element) { element = $(element); Element[Element.visible(element) ? 'hide' : 'show'](element); return element; }, hide: function(element) { $(element).style.display = 'none'; return element; }, show: function(element) { $(element).style.display = ''; return element; }, remove: function(element) { element = $(element); element.parentNode.removeChild(element); return element; }, update: function(element, content) { element = $(element); if (content && content.toElement) content = content.toElement(); if (Object.isElement(content)) return element.update().insert(content); content = Object.toHTML(content); element.innerHTML = content.stripScripts(); content.evalScripts.bind(content).defer(); return element; }, replace: function(element, content) { element = $(element); if (content && content.toElement) content = content.toElement(); else if (!Object.isElement(content)) { content = Object.toHTML(content); var range = element.ownerDocument.createRange(); range.selectNode(element); content.evalScripts.bind(content).defer(); content = range.createContextualFragment(content.stripScripts()); } element.parentNode.replaceChild(content, element); return element; }, insert: function(element, insertions) { element = $(element); if (Object.isString(insertions) || Object.isNumber(insertions) || Object.isElement(insertions) || (insertions && (insertions.toElement || insertions.toHTML))) insertions = {bottom:insertions}; var content, t, range; for (position in insertions) { content = insertions[position]; position = position.toLowerCase(); t = Element._insertionTranslations[position]; if (content && content.toElement) content = content.toElement(); if (Object.isElement(content)) { t.insert(element, content); continue; } content = Object.toHTML(content); range = element.ownerDocument.createRange(); t.initializeRange(element, range); 
t.insert(element, range.createContextualFragment(content.stripScripts())); content.evalScripts.bind(content).defer(); } return element; }, wrap: function(element, wrapper, attributes) { element = $(element); if (Object.isElement(wrapper)) $(wrapper).writeAttribute(attributes || { }); else if (Object.isString(wrapper)) wrapper = new Element(wrapper, attributes); else wrapper = new Element('div', wrapper); if (element.parentNode) element.parentNode.replaceChild(wrapper, element); wrapper.appendChild(element); return wrapper; }, inspect: function(element) { element = $(element); var result = '<' + element.tagName.toLowerCase(); $H({'id': 'id', 'className': 'class'}).each(function(pair) { var property = pair.first(), attribute = pair.last(); var value = (element[property] || '').toString(); if (value) result += ' ' + attribute + '=' + value.inspect(true); }); return result + '>'; }, recursivelyCollect: function(element, property) { element = $(element); var elements = []; while (element = element[property]) if (element.nodeType == 1) elements.push(Element.extend(element)); return elements; }, ancestors: function(element) { return $(element).recursivelyCollect('parentNode'); }, descendants: function(element) { return $A($(element).getElementsByTagName('*')).each(Element.extend); }, firstDescendant: function(element) { element = $(element).firstChild; while (element && element.nodeType != 1) element = element.nextSibling; return $(element); }, immediateDescendants: function(element) { if (!(element = $(element).firstChild)) return []; while (element && element.nodeType != 1) element = element.nextSibling; if (element) return [element].concat($(element).nextSiblings()); return []; }, previousSiblings: function(element) { return $(element).recursivelyCollect('previousSibling'); }, nextSiblings: function(element) { return $(element).recursivelyCollect('nextSibling'); }, siblings: function(element) { element = $(element); return 
element.previousSiblings().reverse().concat(element.nextSiblings()); }, match: function(element, selector) { if (Object.isString(selector)) selector = new Selector(selector); return selector.match($(element)); }, up: function(element, expression, index) { element = $(element); if (arguments.length == 1) return $(element.parentNode); var ancestors = element.ancestors(); return expression ? Selector.findElement(ancestors, expression, index) : ancestors[index || 0]; }, down: function(element, expression, index) { element = $(element); if (arguments.length == 1) return element.firstDescendant(); var descendants = element.descendants(); return expression ? Selector.findElement(descendants, expression, index) : descendants[index || 0]; }, previous: function(element, expression, index) { element = $(element); if (arguments.length == 1) return $(Selector.handlers.previousElementSibling(element)); var previousSiblings = element.previousSiblings(); return expression ? Selector.findElement(previousSiblings, expression, index) : previousSiblings[index || 0]; }, next: function(element, expression, index) { element = $(element); if (arguments.length == 1) return $(Selector.handlers.nextElementSibling(element)); var nextSiblings = element.nextSiblings(); return expression ? 
Selector.findElement(nextSiblings, expression, index) : nextSiblings[index || 0]; }, select: function() { var args = $A(arguments), element = $(args.shift()); return Selector.findChildElements(element, args); }, adjacent: function() { var args = $A(arguments), element = $(args.shift()); return Selector.findChildElements(element.parentNode, args).without(element); }, identify: function(element) { element = $(element); var id = element.readAttribute('id'), self = arguments.callee; if (id) return id; do { id = 'anonymous_element_' + self.counter++ } while ($(id)); element.writeAttribute('id', id); return id; }, readAttribute: function(element, name) { element = $(element); if (Prototype.Browser.IE) { var t = Element._attributeTranslations.read; if (t.values[name]) return t.values[name](element, name); if (t.names[name]) name = t.names[name]; if (name.include(':')) { return (!element.attributes || !element.attributes[name]) ? null : element.attributes[name].value; } } return element.getAttribute(name); }, writeAttribute: function(element, name, value) { element = $(element); var attributes = { }, t = Element._attributeTranslations.write; if (typeof name == 'object') attributes = name; else attributes[name] = value === undefined ? 
true : value; for (var attr in attributes) { var name = t.names[attr] || attr, value = attributes[attr]; if (t.values[attr]) name = t.values[attr](element, value); if (value === false || value === null) element.removeAttribute(name); else if (value === true) element.setAttribute(name, name); else element.setAttribute(name, value); } return element; }, getHeight: function(element) { return $(element).getDimensions().height; }, getWidth: function(element) { return $(element).getDimensions().width; }, classNames: function(element) { return new Element.ClassNames(element); }, hasClassName: function(element, className) { if (!(element = $(element))) return; var elementClassName = element.className; return (elementClassName.length > 0 && (elementClassName == className || new RegExp("(^|\\s)" + className + "(\\s|$)").test(elementClassName))); }, addClassName: function(element, className) { if (!(element = $(element))) return; if (!element.hasClassName(className)) element.className += (element.className ? ' ' : '') + className; return element; }, removeClassName: function(element, className) { if (!(element = $(element))) return; element.className = element.className.replace( new RegExp("(^|\\s+)" + className + "(\\s+|$)"), ' ').strip(); return element; }, toggleClassName: function(element, className) { if (!(element = $(element))) return; return element[element.hasClassName(className) ? 
'removeClassName' : 'addClassName'](className); }, // removes whitespace-only text node children cleanWhitespace: function(element) { element = $(element); var node = element.firstChild; while (node) { var nextNode = node.nextSibling; if (node.nodeType == 3 && !/\S/.test(node.nodeValue)) element.removeChild(node); node = nextNode; } return element; }, empty: function(element) { return $(element).innerHTML.blank(); }, descendantOf: function(element, ancestor) { element = $(element), ancestor = $(ancestor); if (element.compareDocumentPosition) return (element.compareDocumentPosition(ancestor) & 8) === 8; if (element.sourceIndex && !Prototype.Browser.Opera) { var e = element.sourceIndex, a = ancestor.sourceIndex, nextAncestor = ancestor.nextSibling; if (!nextAncestor) { do { ancestor = ancestor.parentNode; } while (!(nextAncestor = ancestor.nextSibling) && ancestor.parentNode); } if (nextAncestor) return (e > a && e < nextAncestor.sourceIndex); } while (element = element.parentNode) if (element == ancestor) return true; return false; }, scrollTo: function(element) { element = $(element); var pos = element.cumulativeOffset(); window.scrollTo(pos[0], pos[1]); return element; }, getStyle: function(element, style) { element = $(element); style = style == 'float' ? 'cssFloat' : style.camelize(); var value = element.style[style]; if (!value) { var css = document.defaultView.getComputedStyle(element, null); value = css ? css[style] : null; } if (style == 'opacity') return value ? parseFloat(value) : 1.0; return value == 'auto' ? null : value; }, getOpacity: function(element) { return $(element).getStyle('opacity'); }, setStyle: function(element, styles) { element = $(element); var elementStyle = element.style, match; if (Object.isString(styles)) { element.style.cssText += ';' + styles; return styles.include('opacity') ? 
element.setOpacity(styles.match(/opacity:\s*(\d?\.?\d*)/)[1]) : element; } for (var property in styles) if (property == 'opacity') element.setOpacity(styles[property]); else elementStyle[(property == 'float' || property == 'cssFloat') ? (elementStyle.styleFloat === undefined ? 'cssFloat' : 'styleFloat') : property] = styles[property]; return element; }, setOpacity: function(element, value) { element = $(element); element.style.opacity = (value == 1 || value === '') ? '' : (value < 0.00001) ? 0 : value; return element; }, getDimensions: function(element) { element = $(element); var display = $(element).getStyle('display'); if (display != 'none' && display != null) // Safari bug return {width: element.offsetWidth, height: element.offsetHeight}; // All *Width and *Height properties give 0 on elements with display none, // so enable the element temporarily var els = element.style; var originalVisibility = els.visibility; var originalPosition = els.position; var originalDisplay = els.display; els.visibility = 'hidden'; els.position = 'absolute'; els.display = 'block'; var originalWidth = element.clientWidth; var originalHeight = element.clientHeight; els.display = originalDisplay; els.position = originalPosition; els.visibility = originalVisibility; return {width: originalWidth, height: originalHeight}; }, makePositioned: function(element) { element = $(element); var pos = Element.getStyle(element, 'position'); if (pos == 'static' || !pos) { element._madePositioned = true; element.style.position = 'relative'; // Opera returns the offset relative to the positioning context, when an // element is position relative but top and left have not been defined if (window.opera) { element.style.top = 0; element.style.left = 0; } } return element; }, undoPositioned: function(element) { element = $(element); if (element._madePositioned) { element._madePositioned = undefined; element.style.position = element.style.top = element.style.left = element.style.bottom = element.style.right 
= ''; } return element; }, makeClipping: function(element) { element = $(element); if (element._overflow) return element; element._overflow = Element.getStyle(element, 'overflow') || 'auto'; if (element._overflow !== 'hidden') element.style.overflow = 'hidden'; return element; }, undoClipping: function(element) { element = $(element); if (!element._overflow) return element; element.style.overflow = element._overflow == 'auto' ? '' : element._overflow; element._overflow = null; return element; }, cumulativeOffset: function(element) { var valueT = 0, valueL = 0; do { valueT += element.offsetTop || 0; valueL += element.offsetLeft || 0; element = element.offsetParent; } while (element); return Element._returnOffset(valueL, valueT); }, positionedOffset: function(element) { var valueT = 0, valueL = 0; do { valueT += element.offsetTop || 0; valueL += element.offsetLeft || 0; element = element.offsetParent; if (element) { if (element.tagName == 'BODY') break; var p = Element.getStyle(element, 'position'); if (p == 'relative' || p == 'absolute') break; } } while (element); return Element._returnOffset(valueL, valueT); }, absolutize: function(element) { element = $(element); if (element.getStyle('position') == 'absolute') return; // Position.prepare(); // To be done manually by Scripty when it needs it. 
var offsets = element.positionedOffset(); var top = offsets[1]; var left = offsets[0]; var width = element.clientWidth; var height = element.clientHeight; element._originalLeft = left - parseFloat(element.style.left || 0); element._originalTop = top - parseFloat(element.style.top || 0); element._originalWidth = element.style.width; element._originalHeight = element.style.height; element.style.position = 'absolute'; element.style.top = top + 'px'; element.style.left = left + 'px'; element.style.width = width + 'px'; element.style.height = height + 'px'; return element; }, relativize: function(element) { element = $(element); if (element.getStyle('position') == 'relative') return; // Position.prepare(); // To be done manually by Scripty when it needs it. element.style.position = 'relative'; var top = parseFloat(element.style.top || 0) - (element._originalTop || 0); var left = parseFloat(element.style.left || 0) - (element._originalLeft || 0); element.style.top = top + 'px'; element.style.left = left + 'px'; element.style.height = element._originalHeight; element.style.width = element._originalWidth; return element; }, cumulativeScrollOffset: function(element) { var valueT = 0, valueL = 0; do { valueT += element.scrollTop || 0; valueL += element.scrollLeft || 0; element = element.parentNode; } while (element); return Element._returnOffset(valueL, valueT); }, getOffsetParent: function(element) { if (element.offsetParent) return $(element.offsetParent); if (element == document.body) return $(element); while ((element = element.parentNode) && element != document.body) if (Element.getStyle(element, 'position') != 'static') return $(element); return $(document.body); }, viewportOffset: function(forElement) { var valueT = 0, valueL = 0; var element = forElement; do { valueT += element.offsetTop || 0; valueL += element.offsetLeft || 0; // Safari fix if (element.offsetParent == document.body && Element.getStyle(element, 'position') == 'absolute') break; } while (element = 
element.offsetParent); element = forElement; do { if (!Prototype.Browser.Opera || element.tagName == 'BODY') { valueT -= element.scrollTop || 0; valueL -= element.scrollLeft || 0; } } while (element = element.parentNode); return Element._returnOffset(valueL, valueT); }, clonePosition: function(element, source) { var options = Object.extend({ setLeft: true, setTop: true, setWidth: true, setHeight: true, offsetTop: 0, offsetLeft: 0 }, arguments[2] || { }); // find page position of source source = $(source); var p = source.viewportOffset(); // find coordinate system to use element = $(element); var delta = [0, 0]; var parent = null; // delta [0,0] will do fine with position: fixed elements, // position:absolute needs offsetParent deltas if (Element.getStyle(element, 'position') == 'absolute') { parent = element.getOffsetParent(); delta = parent.viewportOffset(); } // correct by body offsets (fixes Safari) if (parent == document.body) { delta[0] -= document.body.offsetLeft; delta[1] -= document.body.offsetTop; } // set position if (options.setLeft) element.style.left = (p[0] - delta[0] + options.offsetLeft) + 'px'; if (options.setTop) element.style.top = (p[1] - delta[1] + options.offsetTop) + 'px'; if (options.setWidth) element.style.width = source.offsetWidth + 'px'; if (options.setHeight) element.style.height = source.offsetHeight + 'px'; return element; } }; Element.Methods.identify.counter = 1; Object.extend(Element.Methods, { getElementsBySelector: Element.Methods.select, childElements: Element.Methods.immediateDescendants }); Element._attributeTranslations = { write: { names: { className: 'class', htmlFor: 'for' }, values: { } } }; if (!document.createRange || Prototype.Browser.Opera) { Element.Methods.insert = function(element, insertions) { element = $(element); if (Object.isString(insertions) || Object.isNumber(insertions) || Object.isElement(insertions) || (insertions && (insertions.toElement || insertions.toHTML))) insertions = { bottom: insertions }; var t 
= Element._insertionTranslations, content, position, pos, tagName; for (position in insertions) { content = insertions[position]; position = position.toLowerCase(); pos = t[position]; if (content && content.toElement) content = content.toElement(); if (Object.isElement(content)) { pos.insert(element, content); continue; } content = Object.toHTML(content); tagName = ((position == 'before' || position == 'after') ? element.parentNode : element).tagName.toUpperCase(); if (t.tags[tagName]) { var fragments = Element._getContentFromAnonymousElement(tagName, content.stripScripts()); if (position == 'top' || position == 'after') fragments.reverse(); fragments.each(pos.insert.curry(element)); } else element.insertAdjacentHTML(pos.adjacency, content.stripScripts()); content.evalScripts.bind(content).defer(); } return element; }; } if (Prototype.Browser.Opera) { Element.Methods._getStyle = Element.Methods.getStyle; Element.Methods.getStyle = function(element, style) { switch(style) { case 'left': case 'top': case 'right': case 'bottom': if (Element._getStyle(element, 'position') == 'static') return null; default: return Element._getStyle(element, style); } }; Element.Methods._readAttribute = Element.Methods.readAttribute; Element.Methods.readAttribute = function(element, attribute) { if (attribute == 'title') return element.title; return Element._readAttribute(element, attribute); }; } else if (Prototype.Browser.IE) { $w('positionedOffset getOffsetParent viewportOffset').each(function(method) { Element.Methods[method] = Element.Methods[method].wrap( function(proceed, element) { element = $(element); var position = element.getStyle('position'); if (position != 'static') return proceed(element); element.setStyle({ position: 'relative' }); var value = proceed(element); element.setStyle({ position: position }); return value; } ); }); Element.Methods.getStyle = function(element, style) { element = $(element); style = (style == 'float' || style == 'cssFloat') ? 
'styleFloat' : style.camelize(); var value = element.style[style]; if (!value && element.currentStyle) value = element.currentStyle[style]; if (style == 'opacity') { if (value = (element.getStyle('filter') || '').match(/alpha\(opacity=(.*)\)/)) if (value[1]) return parseFloat(value[1]) / 100; return 1.0; } if (value == 'auto') { if ((style == 'width' || style == 'height') && (element.getStyle('display') != 'none')) return element['offset' + style.capitalize()] + 'px'; return null; } return value; }; Element.Methods.setOpacity = function(element, value) { function stripAlpha(filter){ return filter.replace(/alpha\([^\)]*\)/gi,''); } element = $(element); var currentStyle = element.currentStyle; if ((currentStyle && !currentStyle.hasLayout) || (!currentStyle && element.style.zoom == 'normal')) element.style.zoom = 1; var filter = element.getStyle('filter'), style = element.style; if (value == 1 || value === '') { (filter = stripAlpha(filter)) ? style.filter = filter : style.removeAttribute('filter'); return element; } else if (value < 0.00001) value = 0; style.filter = stripAlpha(filter) + 'alpha(opacity=' + (value * 100) + ')'; return element; }; Element._attributeTranslations = { read: { names: { 'class': 'className', 'for': 'htmlFor' }, values: { _getAttr: function(element, attribute) { return element.getAttribute(attribute, 2); }, _getAttrNode: function(element, attribute) { var node = element.getAttributeNode(attribute); return node ? node.value : ""; }, _getEv: function(element, attribute) { var attribute = element.getAttribute(attribute); return attribute ? attribute.toString().slice(23, -2) : null; }, _flag: function(element, attribute) { return $(element).hasAttribute(attribute) ? 
attribute : null; }, style: function(element) { return element.style.cssText.toLowerCase(); }, title: function(element) { return element.title; } } } }; Element._attributeTranslations.write = { names: Object.clone(Element._attributeTranslations.read.names), values: { checked: function(element, value) { element.checked = !!value; }, style: function(element, value) { element.style.cssText = value ? value : ''; } } }; Element._attributeTranslations.has = {}; $w('colSpan rowSpan vAlign dateTime accessKey tabIndex ' + 'encType maxLength readOnly longDesc').each(function(attr) { Element._attributeTranslations.write.names[attr.toLowerCase()] = attr; Element._attributeTranslations.has[attr.toLowerCase()] = attr; }); (function(v) { Object.extend(v, { href: v._getAttr, src: v._getAttr, type: v._getAttr, action: v._getAttrNode, disabled: v._flag, checked: v._flag, readonly: v._flag, multiple: v._flag, onload: v._getEv, onunload: v._getEv, onclick: v._getEv, ondblclick: v._getEv, onmousedown: v._getEv, onmouseup: v._getEv, onmouseover: v._getEv, onmousemove: v._getEv, onmouseout: v._getEv, onfocus: v._getEv, onblur: v._getEv, onkeypress: v._getEv, onkeydown: v._getEv, onkeyup: v._getEv, onsubmit: v._getEv, onreset: v._getEv, onselect: v._getEv, onchange: v._getEv }); })(Element._attributeTranslations.read.values); } else if (Prototype.Browser.Gecko && /rv:1\.8\.0/.test(navigator.userAgent)) { Element.Methods.setOpacity = function(element, value) { element = $(element); element.style.opacity = (value == 1) ? 0.999999 : (value === '') ? '' : (value < 0.00001) ? 0 : value; return element; }; } else if (Prototype.Browser.WebKit) { Element.Methods.setOpacity = function(element, value) { element = $(element); element.style.opacity = (value == 1 || value === '') ? '' : (value < 0.00001) ? 
0 : value; if (value == 1) if(element.tagName == 'IMG' && element.width) { element.width++; element.width--; } else try { var n = document.createTextNode(' '); element.appendChild(n); element.removeChild(n); } catch (e) { } return element; }; // Safari returns margins on body which is incorrect if the child is absolutely // positioned. For performance reasons, redefine Position.cumulativeOffset for // KHTML/WebKit only. Element.Methods.cumulativeOffset = function(element) { var valueT = 0, valueL = 0; do { valueT += element.offsetTop || 0; valueL += element.offsetLeft || 0; if (element.offsetParent == document.body) if (Element.getStyle(element, 'position') == 'absolute') break; element = element.offsetParent; } while (element); return Element._returnOffset(valueL, valueT); }; } if (Prototype.Browser.IE || Prototype.Browser.Opera) { // IE and Opera are missing .innerHTML support for TABLE-related and SELECT elements Element.Methods.update = function(element, content) { element = $(element); if (content && content.toElement) content = content.toElement(); if (Object.isElement(content)) return element.update().insert(content); content = Object.toHTML(content); var tagName = element.tagName.toUpperCase(); if (tagName in Element._insertionTranslations.tags) { $A(element.childNodes).each(function(node) { element.removeChild(node) }); Element._getContentFromAnonymousElement(tagName, content.stripScripts()) .each(function(node) { element.appendChild(node) }); } else element.innerHTML = content.stripScripts(); content.evalScripts.bind(content).defer(); return element; }; } if (document.createElement('div').outerHTML) { Element.Methods.replace = function(element, content) { element = $(element); if (content && content.toElement) content = content.toElement(); if (Object.isElement(content)) { element.parentNode.replaceChild(content, element); return element; } content = Object.toHTML(content); var parent = element.parentNode, tagName = parent.tagName.toUpperCase(); if 
(Element._insertionTranslations.tags[tagName]) { var nextSibling = element.next(); var fragments = Element._getContentFromAnonymousElement(tagName, content.stripScripts()); parent.removeChild(element); if (nextSibling) fragments.each(function(node) { parent.insertBefore(node, nextSibling) }); else fragments.each(function(node) { parent.appendChild(node) }); } else element.outerHTML = content.stripScripts(); content.evalScripts.bind(content).defer(); return element; }; } Element._returnOffset = function(l, t) { var result = [l, t]; result.left = l; result.top = t; return result; }; Element._getContentFromAnonymousElement = function(tagName, html) { var div = new Element('div'), t = Element._insertionTranslations.tags[tagName]; div.innerHTML = t[0] + html + t[1]; t[2].times(function() { div = div.firstChild }); return $A(div.childNodes); }; Element._insertionTranslations = { before: { adjacency: 'beforeBegin', insert: function(element, node) { element.parentNode.insertBefore(node, element); }, initializeRange: function(element, range) { range.setStartBefore(element); } }, top: { adjacency: 'afterBegin', insert: function(element, node) { element.insertBefore(node, element.firstChild); }, initializeRange: function(element, range) { range.selectNodeContents(element); range.collapse(true); } }, bottom: { adjacency: 'beforeEnd', insert: function(element, node) { element.appendChild(node); } }, after: { adjacency: 'afterEnd', insert: function(element, node) { element.parentNode.insertBefore(node, element.nextSibling); }, initializeRange: function(element, range) { range.setStartAfter(element); } }, tags: { TABLE: ['', '
', 1], TBODY: ['', '
', 2], TR: ['', '
', 3], TD: ['
', '
', 4], SELECT: ['', 1] } }; (function() { this.bottom.initializeRange = this.top.initializeRange; Object.extend(this.tags, { THEAD: this.tags.TBODY, TFOOT: this.tags.TBODY, TH: this.tags.TD }); }).call(Element._insertionTranslations); Element.Methods.Simulated = { hasAttribute: function(element, attribute) { attribute = Element._attributeTranslations.has[attribute] || attribute; var node = $(element).getAttributeNode(attribute); return node && node.specified; } }; Element.Methods.ByTag = { }; Object.extend(Element, Element.Methods); if (!Prototype.BrowserFeatures.ElementExtensions && document.createElement('div').__proto__) { window.HTMLElement = { }; window.HTMLElement.prototype = document.createElement('div').__proto__; Prototype.BrowserFeatures.ElementExtensions = true; } Element.extend = (function() { if (Prototype.BrowserFeatures.SpecificElementExtensions) return Prototype.K; var Methods = { }, ByTag = Element.Methods.ByTag; var extend = Object.extend(function(element) { if (!element || element._extendedByPrototype || element.nodeType != 1 || element == window) return element; var methods = Object.clone(Methods), tagName = element.tagName, property, value; // extend methods for specific tags if (ByTag[tagName]) Object.extend(methods, ByTag[tagName]); for (property in methods) { value = methods[property]; if (Object.isFunction(value) && !(property in element)) element[property] = value.methodize(); } element._extendedByPrototype = Prototype.emptyFunction; return element; }, { refresh: function() { // extend methods for all tags (Safari doesn't need this) if (!Prototype.BrowserFeatures.ElementExtensions) { Object.extend(Methods, Element.Methods); Object.extend(Methods, Element.Methods.Simulated); } } }); extend.refresh(); return extend; })(); Element.hasAttribute = function(element, attribute) { if (element.hasAttribute) return element.hasAttribute(attribute); return Element.Methods.Simulated.hasAttribute(element, attribute); }; Element.addMethods = 
function(methods) { var F = Prototype.BrowserFeatures, T = Element.Methods.ByTag; if (!methods) { Object.extend(Form, Form.Methods); Object.extend(Form.Element, Form.Element.Methods); Object.extend(Element.Methods.ByTag, { "FORM": Object.clone(Form.Methods), "INPUT": Object.clone(Form.Element.Methods), "SELECT": Object.clone(Form.Element.Methods), "TEXTAREA": Object.clone(Form.Element.Methods) }); } if (arguments.length == 2) { var tagName = methods; methods = arguments[1]; } if (!tagName) Object.extend(Element.Methods, methods || { }); else { if (Object.isArray(tagName)) tagName.each(extend); else extend(tagName); } function extend(tagName) { tagName = tagName.toUpperCase(); if (!Element.Methods.ByTag[tagName]) Element.Methods.ByTag[tagName] = { }; Object.extend(Element.Methods.ByTag[tagName], methods); } function copy(methods, destination, onlyIfAbsent) { onlyIfAbsent = onlyIfAbsent || false; for (var property in methods) { var value = methods[property]; if (!Object.isFunction(value)) continue; if (!onlyIfAbsent || !(property in destination)) destination[property] = value.methodize(); } } function findDOMClass(tagName) { var klass; var trans = { "OPTGROUP": "OptGroup", "TEXTAREA": "TextArea", "P": "Paragraph", "FIELDSET": "FieldSet", "UL": "UList", "OL": "OList", "DL": "DList", "DIR": "Directory", "H1": "Heading", "H2": "Heading", "H3": "Heading", "H4": "Heading", "H5": "Heading", "H6": "Heading", "Q": "Quote", "INS": "Mod", "DEL": "Mod", "A": "Anchor", "IMG": "Image", "CAPTION": "TableCaption", "COL": "TableCol", "COLGROUP": "TableCol", "THEAD": "TableSection", "TFOOT": "TableSection", "TBODY": "TableSection", "TR": "TableRow", "TH": "TableCell", "TD": "TableCell", "FRAMESET": "FrameSet", "IFRAME": "IFrame" }; if (trans[tagName]) klass = 'HTML' + trans[tagName] + 'Element'; if (window[klass]) return window[klass]; klass = 'HTML' + tagName + 'Element'; if (window[klass]) return window[klass]; klass = 'HTML' + tagName.capitalize() + 'Element'; if (window[klass]) 
return window[klass]; window[klass] = { }; window[klass].prototype = document.createElement(tagName).__proto__; return window[klass]; } if (F.ElementExtensions) { copy(Element.Methods, HTMLElement.prototype); copy(Element.Methods.Simulated, HTMLElement.prototype, true); } if (F.SpecificElementExtensions) { for (var tag in Element.Methods.ByTag) { var klass = findDOMClass(tag); if (Object.isUndefined(klass)) continue; copy(T[tag], klass.prototype); } } Object.extend(Element, Element.Methods); delete Element.ByTag; if (Element.extend.refresh) Element.extend.refresh(); Element.cache = { }; }; document.viewport = { getDimensions: function() { var dimensions = { }; $w('width height').each(function(d) { var D = d.capitalize(); dimensions[d] = self['inner' + D] || (document.documentElement['client' + D] || document.body['client' + D]); }); return dimensions; }, getWidth: function() { return this.getDimensions().width; }, getHeight: function() { return this.getDimensions().height; }, getScrollOffsets: function() { return Element._returnOffset( window.pageXOffset || document.documentElement.scrollLeft || document.body.scrollLeft, window.pageYOffset || document.documentElement.scrollTop || document.body.scrollTop); } }; /* Portions of the Selector class are derived from Jack Slocum’s DomQuery, * part of YUI-Ext version 0.40, distributed under the terms of an MIT-style * license. Please see http://www.yui-ext.com/ for more information. 
*/ var Selector = Class.create({ initialize: function(expression) { this.expression = expression.strip(); this.compileMatcher(); }, compileMatcher: function() { // Selectors with namespaced attributes can't use the XPath version if (Prototype.BrowserFeatures.XPath && !(/(\[[\w-]*?:|:checked)/).test(this.expression)) return this.compileXPathMatcher(); var e = this.expression, ps = Selector.patterns, h = Selector.handlers, c = Selector.criteria, le, p, m; if (Selector._cache[e]) { this.matcher = Selector._cache[e]; return; } this.matcher = ["this.matcher = function(root) {", "var r = root, h = Selector.handlers, c = false, n;"]; while (e && le != e && (/\S/).test(e)) { le = e; for (var i in ps) { p = ps[i]; if (m = e.match(p)) { this.matcher.push(Object.isFunction(c[i]) ? c[i](m) : new Template(c[i]).evaluate(m)); e = e.replace(m[0], ''); break; } } } this.matcher.push("return h.unique(n);\n}"); eval(this.matcher.join('\n')); Selector._cache[this.expression] = this.matcher; }, compileXPathMatcher: function() { var e = this.expression, ps = Selector.patterns, x = Selector.xpath, le, m; if (Selector._cache[e]) { this.xpath = Selector._cache[e]; return; } this.matcher = ['.//*']; while (e && le != e && (/\S/).test(e)) { le = e; for (var i in ps) { if (m = e.match(ps[i])) { this.matcher.push(Object.isFunction(x[i]) ? x[i](m) : new Template(x[i]).evaluate(m)); e = e.replace(m[0], ''); break; } } } this.xpath = this.matcher.join(''); Selector._cache[this.expression] = this.xpath; }, findElements: function(root) { root = root || document; if (this.xpath) return document._getElementsByXPath(this.xpath, root); return this.matcher(root); }, match: function(element) { this.tokens = []; var e = this.expression, ps = Selector.patterns, as = Selector.assertions; var le, p, m; while (e && le !== e && (/\S/).test(e)) { le = e; for (var i in ps) { p = ps[i]; if (m = e.match(p)) { // use the Selector.assertions methods unless the selector // is too complex. 
if (as[i]) { this.tokens.push([i, Object.clone(m)]); e = e.replace(m[0], ''); } else { // reluctantly do a document-wide search // and look for a match in the array return this.findElements(document).include(element); } } } } var match = true, name, matches; for (var i = 0, token; token = this.tokens[i]; i++) { name = token[0], matches = token[1]; if (!Selector.assertions[name](element, matches)) { match = false; break; } } return match; }, toString: function() { return this.expression; }, inspect: function() { return "#"; } }); Object.extend(Selector, { _cache: { }, xpath: { descendant: "//*", child: "/*", adjacent: "/following-sibling::*[1]", laterSibling: '/following-sibling::*', tagName: function(m) { if (m[1] == '*') return ''; return "[local-name()='" + m[1].toLowerCase() + "' or local-name()='" + m[1].toUpperCase() + "']"; }, className: "[contains(concat(' ', @class, ' '), ' #{1} ')]", id: "[@id='#{1}']", attrPresence: "[@#{1}]", attr: function(m) { m[3] = m[5] || m[6]; return new Template(Selector.xpath.operators[m[2]]).evaluate(m); }, pseudo: function(m) { var h = Selector.xpath.pseudos[m[1]]; if (!h) return ''; if (Object.isFunction(h)) return h(m); return new Template(Selector.xpath.pseudos[m[1]]).evaluate(m); }, operators: { '=': "[@#{1}='#{3}']", '!=': "[@#{1}!='#{3}']", '^=': "[starts-with(@#{1}, '#{3}')]", '$=': "[substring(@#{1}, (string-length(@#{1}) - string-length('#{3}') + 1))='#{3}']", '*=': "[contains(@#{1}, '#{3}')]", '~=': "[contains(concat(' ', @#{1}, ' '), ' #{3} ')]", '|=': "[contains(concat('-', @#{1}, '-'), '-#{3}-')]" }, pseudos: { 'first-child': '[not(preceding-sibling::*)]', 'last-child': '[not(following-sibling::*)]', 'only-child': '[not(preceding-sibling::* or following-sibling::*)]', 'empty': "[count(*) = 0 and (count(text()) = 0 or translate(text(), ' \t\r\n', '') = '')]", 'checked': "[@checked]", 'disabled': "[@disabled]", 'enabled': "[not(@disabled)]", 'not': function(m) { var e = m[6], p = Selector.patterns, x = 
Selector.xpath, le, m, v; var exclusion = []; while (e && le != e && (/\S/).test(e)) { le = e; for (var i in p) { if (m = e.match(p[i])) { v = Object.isFunction(x[i]) ? x[i](m) : new Template(x[i]).evaluate(m); exclusion.push("(" + v.substring(1, v.length - 1) + ")"); e = e.replace(m[0], ''); break; } } } return "[not(" + exclusion.join(" and ") + ")]"; }, 'nth-child': function(m) { return Selector.xpath.pseudos.nth("(count(./preceding-sibling::*) + 1) ", m); }, 'nth-last-child': function(m) { return Selector.xpath.pseudos.nth("(count(./following-sibling::*) + 1) ", m); }, 'nth-of-type': function(m) { return Selector.xpath.pseudos.nth("position() ", m); }, 'nth-last-of-type': function(m) { return Selector.xpath.pseudos.nth("(last() + 1 - position()) ", m); }, 'first-of-type': function(m) { m[6] = "1"; return Selector.xpath.pseudos['nth-of-type'](m); }, 'last-of-type': function(m) { m[6] = "1"; return Selector.xpath.pseudos['nth-last-of-type'](m); }, 'only-of-type': function(m) { var p = Selector.xpath.pseudos; return p['first-of-type'](m) + p['last-of-type'](m); }, nth: function(fragment, m) { var mm, formula = m[6], predicate; if (formula == 'even') formula = '2n+0'; if (formula == 'odd') formula = '2n+1'; if (mm = formula.match(/^(\d+)$/)) // digit only return '[' + fragment + "= " + mm[1] + ']'; if (mm = formula.match(/^(-?\d*)?n(([+-])(\d+))?/)) { // an+b if (mm[1] == "-") mm[1] = -1; var a = mm[1] ? Number(mm[1]) : 1; var b = mm[2] ? 
Number(mm[2]) : 0; predicate = "[((#{fragment} - #{b}) mod #{a} = 0) and " + "((#{fragment} - #{b}) div #{a} >= 0)]"; return new Template(predicate).evaluate({ fragment: fragment, a: a, b: b }); } } } }, criteria: { tagName: 'n = h.tagName(n, r, "#{1}", c); c = false;', className: 'n = h.className(n, r, "#{1}", c); c = false;', id: 'n = h.id(n, r, "#{1}", c); c = false;', attrPresence: 'n = h.attrPresence(n, r, "#{1}"); c = false;', attr: function(m) { m[3] = (m[5] || m[6]); return new Template('n = h.attr(n, r, "#{1}", "#{3}", "#{2}"); c = false;').evaluate(m); }, pseudo: function(m) { if (m[6]) m[6] = m[6].replace(/"/g, '\\"'); return new Template('n = h.pseudo(n, "#{1}", "#{6}", r, c); c = false;').evaluate(m); }, descendant: 'c = "descendant";', child: 'c = "child";', adjacent: 'c = "adjacent";', laterSibling: 'c = "laterSibling";' }, patterns: { // combinators must be listed first // (and descendant needs to be last combinator) laterSibling: /^\s*~\s*/, child: /^\s*>\s*/, adjacent: /^\s*\+\s*/, descendant: /^\s/, // selectors follow tagName: /^\s*(\*|[\w\-]+)(\b|$)?/, id: /^#([\w\-\*]+)(\b|$)/, className: /^\.([\w\-\*]+)(\b|$)/, pseudo: /^:((first|last|nth|nth-last|only)(-child|-of-type)|empty|checked|(en|dis)abled|not)(\((.*?)\))?(\b|$|(?=\s)|(?=:))/, attrPresence: /^\[([\w]+)\]/, attr: /\[((?:[\w-]*:)?[\w-]+)\s*(?:([!^$*~|]?=)\s*((['"])([^\4]*?)\4|([^'"][^\]]*?)))?\]/ }, // for Selector.match and Element#match assertions: { tagName: function(element, matches) { return matches[1].toUpperCase() == element.tagName.toUpperCase(); }, className: function(element, matches) { return Element.hasClassName(element, matches[1]); }, id: function(element, matches) { return element.id === matches[1]; }, attrPresence: function(element, matches) { return Element.hasAttribute(element, matches[1]); }, attr: function(element, matches) { var nodeValue = Element.readAttribute(element, matches[1]); return Selector.operators[matches[2]](nodeValue, matches[3]); } }, handlers: { // 
UTILITY FUNCTIONS // joins two collections concat: function(a, b) { for (var i = 0, node; node = b[i]; i++) a.push(node); return a; }, // marks an array of nodes for counting mark: function(nodes) { for (var i = 0, node; node = nodes[i]; i++) node._counted = true; return nodes; }, unmark: function(nodes) { for (var i = 0, node; node = nodes[i]; i++) node._counted = undefined; return nodes; }, // mark each child node with its position (for nth calls) // "ofType" flag indicates whether we're indexing for nth-of-type // rather than nth-child index: function(parentNode, reverse, ofType) { parentNode._counted = true; if (reverse) { for (var nodes = parentNode.childNodes, i = nodes.length - 1, j = 1; i >= 0; i--) { var node = nodes[i]; if (node.nodeType == 1 && (!ofType || node._counted)) node.nodeIndex = j++; } } else { for (var i = 0, j = 1, nodes = parentNode.childNodes; node = nodes[i]; i++) if (node.nodeType == 1 && (!ofType || node._counted)) node.nodeIndex = j++; } }, // filters out duplicates and extends all nodes unique: function(nodes) { if (nodes.length == 0) return nodes; var results = [], n; for (var i = 0, l = nodes.length; i < l; i++) if (!(n = nodes[i])._counted) { n._counted = true; results.push(Element.extend(n)); } return Selector.handlers.unmark(results); }, // COMBINATOR FUNCTIONS descendant: function(nodes) { var h = Selector.handlers; for (var i = 0, results = [], node; node = nodes[i]; i++) h.concat(results, node.getElementsByTagName('*')); return results; }, child: function(nodes) { var h = Selector.handlers; for (var i = 0, results = [], node; node = nodes[i]; i++) { for (var j = 0, children = [], child; child = node.childNodes[j]; j++) if (child.nodeType == 1 && child.tagName != '!') results.push(child); } return results; }, adjacent: function(nodes) { for (var i = 0, results = [], node; node = nodes[i]; i++) { var next = this.nextElementSibling(node); if (next) results.push(next); } return results; }, laterSibling: function(nodes) { var h = 
Selector.handlers; for (var i = 0, results = [], node; node = nodes[i]; i++) h.concat(results, Element.nextSiblings(node)); return results; }, nextElementSibling: function(node) { while (node = node.nextSibling) if (node.nodeType == 1) return node; return null; }, previousElementSibling: function(node) { while (node = node.previousSibling) if (node.nodeType == 1) return node; return null; }, // TOKEN FUNCTIONS tagName: function(nodes, root, tagName, combinator) { tagName = tagName.toUpperCase(); var results = [], h = Selector.handlers; if (nodes) { if (combinator) { // fastlane for ordinary descendant combinators if (combinator == "descendant") { for (var i = 0, node; node = nodes[i]; i++) h.concat(results, node.getElementsByTagName(tagName)); return results; } else nodes = this[combinator](nodes); if (tagName == "*") return nodes; } for (var i = 0, node; node = nodes[i]; i++) if (node.tagName.toUpperCase() == tagName) results.push(node); return results; } else return root.getElementsByTagName(tagName); }, id: function(nodes, root, id, combinator) { var targetNode = $(id), h = Selector.handlers; if (!targetNode) return []; if (!nodes && root == document) return [targetNode]; if (nodes) { if (combinator) { if (combinator == 'child') { for (var i = 0, node; node = nodes[i]; i++) if (targetNode.parentNode == node) return [targetNode]; } else if (combinator == 'descendant') { for (var i = 0, node; node = nodes[i]; i++) if (Element.descendantOf(targetNode, node)) return [targetNode]; } else if (combinator == 'adjacent') { for (var i = 0, node; node = nodes[i]; i++) if (Selector.handlers.previousElementSibling(targetNode) == node) return [targetNode]; } else nodes = h[combinator](nodes); } for (var i = 0, node; node = nodes[i]; i++) if (node == targetNode) return [targetNode]; return []; } return (targetNode && Element.descendantOf(targetNode, root)) ? 
[targetNode] : []; }, className: function(nodes, root, className, combinator) { if (nodes && combinator) nodes = this[combinator](nodes); return Selector.handlers.byClassName(nodes, root, className); }, byClassName: function(nodes, root, className) { if (!nodes) nodes = Selector.handlers.descendant([root]); var needle = ' ' + className + ' '; for (var i = 0, results = [], node, nodeClassName; node = nodes[i]; i++) { nodeClassName = node.className; if (nodeClassName.length == 0) continue; if (nodeClassName == className || (' ' + nodeClassName + ' ').include(needle)) results.push(node); } return results; }, attrPresence: function(nodes, root, attr) { if (!nodes) nodes = root.getElementsByTagName("*"); var results = []; for (var i = 0, node; node = nodes[i]; i++) if (Element.hasAttribute(node, attr)) results.push(node); return results; }, attr: function(nodes, root, attr, value, operator) { if (!nodes) nodes = root.getElementsByTagName("*"); var handler = Selector.operators[operator], results = []; for (var i = 0, node; node = nodes[i]; i++) { var nodeValue = Element.readAttribute(node, attr); if (nodeValue === null) continue; if (handler(nodeValue, value)) results.push(node); } return results; }, pseudo: function(nodes, name, value, root, combinator) { if (nodes && combinator) nodes = this[combinator](nodes); if (!nodes) nodes = root.getElementsByTagName("*"); return Selector.pseudos[name](nodes, value, root); } }, pseudos: { 'first-child': function(nodes, value, root) { for (var i = 0, results = [], node; node = nodes[i]; i++) { if (Selector.handlers.previousElementSibling(node)) continue; results.push(node); } return results; }, 'last-child': function(nodes, value, root) { for (var i = 0, results = [], node; node = nodes[i]; i++) { if (Selector.handlers.nextElementSibling(node)) continue; results.push(node); } return results; }, 'only-child': function(nodes, value, root) { var h = Selector.handlers; for (var i = 0, results = [], node; node = nodes[i]; i++) if 
(!h.previousElementSibling(node) && !h.nextElementSibling(node)) results.push(node); return results; }, 'nth-child': function(nodes, formula, root) { return Selector.pseudos.nth(nodes, formula, root); }, 'nth-last-child': function(nodes, formula, root) { return Selector.pseudos.nth(nodes, formula, root, true); }, 'nth-of-type': function(nodes, formula, root) { return Selector.pseudos.nth(nodes, formula, root, false, true); }, 'nth-last-of-type': function(nodes, formula, root) { return Selector.pseudos.nth(nodes, formula, root, true, true); }, 'first-of-type': function(nodes, formula, root) { return Selector.pseudos.nth(nodes, "1", root, false, true); }, 'last-of-type': function(nodes, formula, root) { return Selector.pseudos.nth(nodes, "1", root, true, true); }, 'only-of-type': function(nodes, formula, root) { var p = Selector.pseudos; return p['last-of-type'](p['first-of-type'](nodes, formula, root), formula, root); }, // handles the an+b logic getIndices: function(a, b, total) { if (a == 0) return b > 0 ? [b] : []; return $R(1, total).inject([], function(memo, i) { if (0 == (i - b) % a && (i - b) / a >= 0) memo.push(i); return memo; }); }, // handles nth(-last)-child, nth(-last)-of-type, and (first|last)-of-type nth: function(nodes, formula, root, reverse, ofType) { if (nodes.length == 0) return []; if (formula == 'even') formula = '2n+0'; if (formula == 'odd') formula = '2n+1'; var h = Selector.handlers, results = [], indexed = [], m; h.mark(nodes); for (var i = 0, node; node = nodes[i]; i++) { if (!node.parentNode._counted) { h.index(node.parentNode, reverse, ofType); indexed.push(node.parentNode); } } if (formula.match(/^\d+$/)) { // just a number formula = Number(formula); for (var i = 0, node; node = nodes[i]; i++) if (node.nodeIndex == formula) results.push(node); } else if (m = formula.match(/^(-?\d*)?n(([+-])(\d+))?/)) { // an+b if (m[1] == "-") m[1] = -1; var a = m[1] ? Number(m[1]) : 1; var b = m[2] ? 
Number(m[2]) : 0; var indices = Selector.pseudos.getIndices(a, b, nodes.length); for (var i = 0, node, l = indices.length; node = nodes[i]; i++) { for (var j = 0; j < l; j++) if (node.nodeIndex == indices[j]) results.push(node); } } h.unmark(nodes); h.unmark(indexed); return results; }, 'empty': function(nodes, value, root) { for (var i = 0, results = [], node; node = nodes[i]; i++) { // IE treats comments as element nodes if (node.tagName == '!' || (node.firstChild && !node.innerHTML.match(/^\s*$/))) continue; results.push(node); } return results; }, 'not': function(nodes, selector, root) { var h = Selector.handlers, selectorType, m; var exclusions = new Selector(selector).findElements(root); h.mark(exclusions); for (var i = 0, results = [], node; node = nodes[i]; i++) if (!node._counted) results.push(node); h.unmark(exclusions); return results; }, 'enabled': function(nodes, value, root) { for (var i = 0, results = [], node; node = nodes[i]; i++) if (!node.disabled) results.push(node); return results; }, 'disabled': function(nodes, value, root) { for (var i = 0, results = [], node; node = nodes[i]; i++) if (node.disabled) results.push(node); return results; }, 'checked': function(nodes, value, root) { for (var i = 0, results = [], node; node = nodes[i]; i++) if (node.checked) results.push(node); return results; } }, operators: { '=': function(nv, v) { return nv == v; }, '!=': function(nv, v) { return nv != v; }, '^=': function(nv, v) { return nv.startsWith(v); }, '$=': function(nv, v) { return nv.endsWith(v); }, '*=': function(nv, v) { return nv.include(v); }, '~=': function(nv, v) { return (' ' + nv + ' ').include(' ' + v + ' '); }, '|=': function(nv, v) { return ('-' + nv.toUpperCase() + '-').include('-' + v.toUpperCase() + '-'); } }, matchElements: function(elements, expression) { var matches = new Selector(expression).findElements(), h = Selector.handlers; h.mark(matches); for (var i = 0, results = [], element; element = elements[i]; i++) if (element._counted) 
results.push(element); h.unmark(matches); return results; }, findElement: function(elements, expression, index) { if (Object.isNumber(expression)) { index = expression; expression = false; } return Selector.matchElements(elements, expression || '*')[index || 0]; }, findChildElements: function(element, expressions) { var exprs = expressions.join(','), expressions = []; exprs.scan(/(([\w#:.~>+()\s-]+|\*|\[.*?\])+)\s*(,|$)/, function(m) { expressions.push(m[1].strip()); }); var results = [], h = Selector.handlers; for (var i = 0, l = expressions.length, selector; i < l; i++) { selector = new Selector(expressions[i].strip()); h.concat(results, selector.findElements(element)); } return (l > 1) ? h.unique(results) : results; } }); function $$() { return Selector.findChildElements(document, $A(arguments)); } var Form = { reset: function(form) { $(form).reset(); return form; }, serializeElements: function(elements, options) { if (typeof options != 'object') options = { hash: !!options }; else if (options.hash === undefined) options.hash = true; var key, value, submitted = false, submit = options.submit; var data = elements.inject({ }, function(result, element) { if (!element.disabled && element.name) { key = element.name; value = $(element).getValue(); if (value != null && (element.type != 'submit' || (!submitted && submit !== false && (!submit || key == submit) && (submitted = true)))) { if (key in result) { // a key is already present; construct an array of values if (!Object.isArray(result[key])) result[key] = [result[key]]; result[key].push(value); } else result[key] = value; } } return result; }); return options.hash ? 
data : Object.toQueryString(data); } }; Form.Methods = { serialize: function(form, options) { return Form.serializeElements(Form.getElements(form), options); }, getElements: function(form) { return $A($(form).getElementsByTagName('*')).inject([], function(elements, child) { if (Form.Element.Serializers[child.tagName.toLowerCase()]) elements.push(Element.extend(child)); return elements; } ); }, getInputs: function(form, typeName, name) { form = $(form); var inputs = form.getElementsByTagName('input'); if (!typeName && !name) return $A(inputs).map(Element.extend); for (var i = 0, matchingInputs = [], length = inputs.length; i < length; i++) { var input = inputs[i]; if ((typeName && input.type != typeName) || (name && input.name != name)) continue; matchingInputs.push(Element.extend(input)); } return matchingInputs; }, disable: function(form) { form = $(form); Form.getElements(form).invoke('disable'); return form; }, enable: function(form) { form = $(form); Form.getElements(form).invoke('enable'); return form; }, findFirstElement: function(form) { var elements = $(form).getElements().findAll(function(element) { return 'hidden' != element.type && !element.disabled; }); var firstByIndex = elements.findAll(function(element) { return element.hasAttribute('tabIndex') && element.tabIndex >= 0; }).sortBy(function(element) { return element.tabIndex }).first(); return firstByIndex ? 
firstByIndex : elements.find(function(element) { return ['input', 'select', 'textarea'].include(element.tagName.toLowerCase()); }); }, focusFirstElement: function(form) { form = $(form); form.findFirstElement().activate(); return form; }, request: function(form, options) { form = $(form), options = Object.clone(options || { }); var params = options.parameters, action = form.readAttribute('action') || ''; if (action.blank()) action = window.location.href; options.parameters = form.serialize(true); if (params) { if (Object.isString(params)) params = params.toQueryParams(); Object.extend(options.parameters, params); } if (form.hasAttribute('method') && !options.method) options.method = form.method; return new Ajax.Request(action, options); } }; /*--------------------------------------------------------------------------*/ Form.Element = { focus: function(element) { $(element).focus(); return element; }, select: function(element) { $(element).select(); return element; } }; Form.Element.Methods = { serialize: function(element) { element = $(element); if (!element.disabled && element.name) { var value = element.getValue(); if (value != undefined) { var pair = { }; pair[element.name] = value; return Object.toQueryString(pair); } } return ''; }, getValue: function(element) { element = $(element); var method = element.tagName.toLowerCase(); return Form.Element.Serializers[method](element); }, setValue: function(element, value) { element = $(element); var method = element.tagName.toLowerCase(); Form.Element.Serializers[method](element, value); return element; }, clear: function(element) { $(element).value = ''; return element; }, present: function(element) { return $(element).value != ''; }, activate: function(element) { element = $(element); try { element.focus(); if (element.select && (element.tagName.toLowerCase() != 'input' || !['button', 'reset', 'submit'].include(element.type))) element.select(); } catch (e) { } return element; }, disable: function(element) { element = 
$(element); element.blur(); element.disabled = true; return element; }, enable: function(element) { element = $(element); element.disabled = false; return element; } }; /*--------------------------------------------------------------------------*/ var Field = Form.Element; var $F = Form.Element.Methods.getValue; /*--------------------------------------------------------------------------*/ Form.Element.Serializers = { input: function(element, value) { switch (element.type.toLowerCase()) { case 'checkbox': case 'radio': return Form.Element.Serializers.inputSelector(element, value); default: return Form.Element.Serializers.textarea(element, value); } }, inputSelector: function(element, value) { if (value === undefined) return element.checked ? element.value : null; else element.checked = !!value; }, textarea: function(element, value) { if (value === undefined) return element.value; else element.value = value; }, select: function(element, index) { if (index === undefined) return this[element.type == 'select-one' ? 'selectOne' : 'selectMany'](element); else { var opt, value, single = !Object.isArray(index); for (var i = 0, length = element.length; i < length; i++) { opt = element.options[i]; value = this.optionValue(opt); if (single) { if (value == index) { opt.selected = true; return; } } else opt.selected = index.include(value); } } }, selectOne: function(element) { var index = element.selectedIndex; return index >= 0 ? this.optionValue(element.options[index]) : null; }, selectMany: function(element) { var values, length = element.length; if (!length) return null; for (var i = 0, values = []; i < length; i++) { var opt = element.options[i]; if (opt.selected) values.push(this.optionValue(opt)); } return values; }, optionValue: function(opt) { // extend element because hasAttribute may not be native return Element.extend(opt).hasAttribute('value') ? 
opt.value : opt.text; } }; /*--------------------------------------------------------------------------*/ Abstract.TimedObserver = Class.create(PeriodicalExecuter, { initialize: function($super, element, frequency, callback) { $super(callback, frequency); this.element = $(element); this.lastValue = this.getValue(); }, execute: function() { var value = this.getValue(); if (Object.isString(this.lastValue) && Object.isString(value) ? this.lastValue != value : String(this.lastValue) != String(value)) { this.callback(this.element, value); this.lastValue = value; } } }); Form.Element.Observer = Class.create(Abstract.TimedObserver, { getValue: function() { return Form.Element.getValue(this.element); } }); Form.Observer = Class.create(Abstract.TimedObserver, { getValue: function() { return Form.serialize(this.element); } }); /*--------------------------------------------------------------------------*/ Abstract.EventObserver = Class.create({ initialize: function(element, callback) { this.element = $(element); this.callback = callback; this.lastValue = this.getValue(); if (this.element.tagName.toLowerCase() == 'form') this.registerFormCallbacks(); else this.registerCallback(this.element); }, onElementEvent: function() { var value = this.getValue(); if (this.lastValue != value) { this.callback(this.element, value); this.lastValue = value; } }, registerFormCallbacks: function() { Form.getElements(this.element).each(this.registerCallback, this); }, registerCallback: function(element) { if (element.type) { switch (element.type.toLowerCase()) { case 'checkbox': case 'radio': Event.observe(element, 'click', this.onElementEvent.bind(this)); break; default: Event.observe(element, 'change', this.onElementEvent.bind(this)); break; } } } }); Form.Element.EventObserver = Class.create(Abstract.EventObserver, { getValue: function() { return Form.Element.getValue(this.element); } }); Form.EventObserver = Class.create(Abstract.EventObserver, { getValue: function() { return 
Form.serialize(this.element); } }); if (!window.Event) var Event = { }; Object.extend(Event, { KEY_BACKSPACE: 8, KEY_TAB: 9, KEY_RETURN: 13, KEY_ESC: 27, KEY_LEFT: 37, KEY_UP: 38, KEY_RIGHT: 39, KEY_DOWN: 40, KEY_DELETE: 46, KEY_HOME: 36, KEY_END: 35, KEY_PAGEUP: 33, KEY_PAGEDOWN: 34, KEY_INSERT: 45, cache: { }, relatedTarget: function(event) { var element; switch(event.type) { case 'mouseover': element = event.fromElement; break; case 'mouseout': element = event.toElement; break; default: return null; } return Element.extend(element); } }); Event.Methods = (function() { var isButton; if (Prototype.Browser.IE) { var buttonMap = { 0: 1, 1: 4, 2: 2 }; isButton = function(event, code) { return event.button == buttonMap[code]; }; } else if (Prototype.Browser.WebKit) { isButton = function(event, code) { switch (code) { case 0: return event.which == 1 && !event.metaKey; case 1: return event.which == 1 && event.metaKey; default: return false; } }; } else { isButton = function(event, code) { return event.which ? (event.which === code + 1) : (event.button === code); }; } return { isLeftClick: function(event) { return isButton(event, 0) }, isMiddleClick: function(event) { return isButton(event, 1) }, isRightClick: function(event) { return isButton(event, 2) }, element: function(event) { var node = Event.extend(event).target; return Element.extend(node.nodeType == Node.TEXT_NODE ? node.parentNode : node); }, findElement: function(event, expression) { var element = Event.element(event); return element.match(expression) ? 
element : element.up(expression); }, pointer: function(event) { return { x: event.pageX || (event.clientX + (document.documentElement.scrollLeft || document.body.scrollLeft)), y: event.pageY || (event.clientY + (document.documentElement.scrollTop || document.body.scrollTop)) }; }, pointerX: function(event) { return Event.pointer(event).x }, pointerY: function(event) { return Event.pointer(event).y }, stop: function(event) { Event.extend(event); event.preventDefault(); event.stopPropagation(); event.stopped = true; } }; })(); Event.extend = (function() { var methods = Object.keys(Event.Methods).inject({ }, function(m, name) { m[name] = Event.Methods[name].methodize(); return m; }); if (Prototype.Browser.IE) { Object.extend(methods, { stopPropagation: function() { this.cancelBubble = true }, preventDefault: function() { this.returnValue = false }, inspect: function() { return "[object Event]" } }); return function(event) { if (!event) return false; if (event._extendedByPrototype) return event; event._extendedByPrototype = Prototype.emptyFunction; var pointer = Event.pointer(event); Object.extend(event, { target: event.srcElement, relatedTarget: Event.relatedTarget(event), pageX: pointer.x, pageY: pointer.y }); return Object.extend(event, methods); }; } else { Event.prototype = Event.prototype || document.createEvent("HTMLEvents").__proto__; Object.extend(Event.prototype, methods); return Prototype.K; } })(); Object.extend(Event, (function() { var cache = Event.cache; function getEventID(element) { if (element._eventID) return element._eventID; arguments.callee.id = arguments.callee.id || 1; return element._eventID = ++arguments.callee.id; } function getDOMEventName(eventName) { if (eventName && eventName.include(':')) return "dataavailable"; return eventName; } function getCacheForID(id) { return cache[id] = cache[id] || { }; } function getWrappersForEventName(id, eventName) { var c = getCacheForID(id); return c[eventName] = c[eventName] || []; } function 
createWrapper(element, eventName, handler) { var id = getEventID(element); var c = getWrappersForEventName(id, eventName); if (c.pluck("handler").include(handler)) return false; var wrapper = function(event) { if (!Event || !Event.extend || (event.eventName && event.eventName != eventName)) return false; Event.extend(event); handler.call(element, event) }; wrapper.handler = handler; c.push(wrapper); return wrapper; } function findWrapper(id, eventName, handler) { var c = getWrappersForEventName(id, eventName); return c.find(function(wrapper) { return wrapper.handler == handler }); } function destroyWrapper(id, eventName, handler) { var c = getCacheForID(id); if (!c[eventName]) return false; c[eventName] = c[eventName].without(findWrapper(id, eventName, handler)); } function destroyCache() { for (var id in cache) for (var eventName in cache[id]) cache[id][eventName] = null; } if (window.attachEvent) { window.attachEvent("onunload", destroyCache); } return { observe: function(element, eventName, handler) { element = $(element); var name = getDOMEventName(eventName); var wrapper = createWrapper(element, eventName, handler); if (!wrapper) return element; if (element.addEventListener) { element.addEventListener(name, wrapper, false); } else { element.attachEvent("on" + name, wrapper); } return element; }, stopObserving: function(element, eventName, handler) { element = $(element); var id = getEventID(element), name = getDOMEventName(eventName); if (!handler && eventName) { getWrappersForEventName(id, eventName).each(function(wrapper) { element.stopObserving(eventName, wrapper.handler); }); return element; } else if (!eventName) { Object.keys(getCacheForID(id)).each(function(eventName) { element.stopObserving(eventName); }); return element; } var wrapper = findWrapper(id, eventName, handler); if (!wrapper) return element; if (element.removeEventListener) { element.removeEventListener(name, wrapper, false); } else { element.detachEvent("on" + name, wrapper); } 
destroyWrapper(id, eventName, handler); return element; }, fire: function(element, eventName, memo) { element = $(element); if (element == document && document.createEvent && !element.dispatchEvent) element = document.documentElement; if (document.createEvent) { var event = document.createEvent("HTMLEvents"); event.initEvent("dataavailable", true, true); } else { var event = document.createEventObject(); event.eventType = "ondataavailable"; } event.eventName = eventName; event.memo = memo || { }; if (document.createEvent) { element.dispatchEvent(event); } else { element.fireEvent(event.eventType, event); } return event; } }; })()); Object.extend(Event, Event.Methods); Element.addMethods({ fire: Event.fire, observe: Event.observe, stopObserving: Event.stopObserving }); Object.extend(document, { fire: Element.Methods.fire.methodize(), observe: Element.Methods.observe.methodize(), stopObserving: Element.Methods.stopObserving.methodize() }); (function() { /* Support for the DOMContentLoaded event is based on work by Dan Webb, Matthias Miller, Dean Edwards and John Resig. */ var timer, fired = false; function fireContentLoadedEvent() { if (fired) return; if (timer) window.clearInterval(timer); document.fire("dom:loaded"); fired = true; } if (document.addEventListener) { if (Prototype.Browser.WebKit) { timer = window.setInterval(function() { if (/loaded|complete/.test(document.readyState)) fireContentLoadedEvent(); }, 0); Event.observe(window, "load", fireContentLoadedEvent); } else { document.addEventListener("DOMContentLoaded", fireContentLoadedEvent, false); } } else { document.write("

Fetching object from server

Wait...
ruby-json-2.1.0+dfsg.orig/lib/0000755000175000017500000000000013113111601015430 5ustar boutilboutilruby-json-2.1.0+dfsg.orig/lib/json.rb0000644000175000017500000000342113113111601016726 0ustar boutilboutil#frozen_string_literal: false require 'json/common' ## # = JavaScript Object Notation (JSON) # # JSON is a lightweight data-interchange format. It is easy for us # humans to read and write. Plus, equally simple for machines to generate or parse. # JSON is completely language agnostic, making it the ideal interchange format. # # Built on two universally available structures: # 1. A collection of name/value pairs. Often referred to as an _object_, hash table, record, struct, keyed list, or associative array. # 2. An ordered list of values. More commonly called an _array_, vector, sequence or list. # # To read more about JSON visit: http://json.org # # == Parsing JSON # # To parse a JSON string received by another application or generated within # your existing application: # # require 'json' # # my_hash = JSON.parse('{"hello": "goodbye"}') # puts my_hash["hello"] => "goodbye" # # Notice the extra quotes '' around the hash notation. Ruby expects # the argument to be a string and can't convert objects like a hash or array. # # Ruby converts your string into a hash # # == Generating JSON # # Creating a JSON string for communication or serialization is # just as simple. # # require 'json' # # my_hash = {:hello => "goodbye"} # puts JSON.generate(my_hash) => "{\"hello\":\"goodbye\"}" # # Or an alternative way: # # require 'json' # puts {:hello => "goodbye"}.to_json => "{\"hello\":\"goodbye\"}" # # JSON.generate only allows objects or arrays to be converted # to JSON syntax. 
to_json, however, accepts many Ruby classes # even though it acts only as a method for serialization: # # require 'json' # # 1.to_json => "1" # module JSON require 'json/version' begin require 'json/ext' rescue LoadError require 'json/pure' end end ruby-json-2.1.0+dfsg.orig/lib/json/0000755000175000017500000000000013113111601016401 5ustar boutilboutilruby-json-2.1.0+dfsg.orig/lib/json/generic_object.rb0000644000175000017500000000262413113111601021674 0ustar boutilboutil#frozen_string_literal: false require 'ostruct' module JSON class GenericObject < OpenStruct class << self alias [] new def json_creatable? @json_creatable end attr_writer :json_creatable def json_create(data) data = data.dup data.delete JSON.create_id self[data] end def from_hash(object) case when object.respond_to?(:to_hash) result = new object.to_hash.each do |key, value| result[key] = from_hash(value) end result when object.respond_to?(:to_ary) object.to_ary.map { |a| from_hash(a) } else object end end def load(source, proc = nil, opts = {}) result = ::JSON.load(source, proc, opts.merge(:object_class => self)) result.nil? ? new : result end def dump(obj, *args) ::JSON.dump(obj, *args) end end self.json_creatable = false def to_hash table end def [](name) __send__(name) end unless method_defined?(:[]) def []=(name, value) __send__("#{name}=", value) end unless method_defined?(:[]=) def |(other) self.class[other.to_hash.merge(to_hash)] end def as_json(*) { JSON.create_id => self.class.name }.merge to_hash end def to_json(*a) as_json.to_json(*a) end end end ruby-json-2.1.0+dfsg.orig/lib/json/ext.rb0000644000175000017500000000060713113111601017531 0ustar boutilboutilrequire 'json/common' module JSON # This module holds all the modules/classes that implement JSON's # functionality as C extensions. module Ext require 'json/ext/parser' require 'json/ext/generator' $DEBUG and warn "Using Ext extension for JSON." 
JSON.parser = Parser JSON.generator = Generator end JSON_LOADED = true unless defined?(::JSON::JSON_LOADED) end ruby-json-2.1.0+dfsg.orig/lib/json/ext/0000755000175000017500000000000013113111601017201 5ustar boutilboutilruby-json-2.1.0+dfsg.orig/lib/json/ext/.keep0000644000175000017500000000000013113111601020114 0ustar boutilboutilruby-json-2.1.0+dfsg.orig/lib/json/add/0000755000175000017500000000000013113111601017131 5ustar boutilboutilruby-json-2.1.0+dfsg.orig/lib/json/add/complex.rb0000644000175000017500000000135513113111601021131 0ustar boutilboutil#frozen_string_literal: false unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED require 'json' end defined?(::Complex) or require 'complex' class Complex # Deserializes JSON string by converting Real value r, imaginary # value i, to a Complex object. def self.json_create(object) Complex(object['r'], object['i']) end # Returns a hash, that will be turned into a JSON object and represent this # object. def as_json(*) { JSON.create_id => self.class.name, 'r' => real, 'i' => imag, } end # Stores class name (Complex) along with real value r and imaginary value i as JSON string def to_json(*) as_json.to_json end end ruby-json-2.1.0+dfsg.orig/lib/json/add/rational.rb0000644000175000017500000000141413113111601021267 0ustar boutilboutil#frozen_string_literal: false unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED require 'json' end defined?(::Rational) or require 'rational' class Rational # Deserializes JSON string by converting numerator value n, # denominator value d, to a Rational object. def self.json_create(object) Rational(object['n'], object['d']) end # Returns a hash, that will be turned into a JSON object and represent this # object. 
def as_json(*) { JSON.create_id => self.class.name, 'n' => numerator, 'd' => denominator, } end # Stores class name (Rational) along with numerator value n and denominator value d as JSON string def to_json(*) as_json.to_json end end ruby-json-2.1.0+dfsg.orig/lib/json/add/exception.rb0000644000175000017500000000143713113111601021461 0ustar boutilboutil#frozen_string_literal: false unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED require 'json' end class Exception # Deserializes JSON string by constructing new Exception object with message # m and backtrace b serialized with to_json def self.json_create(object) result = new(object['m']) result.set_backtrace object['b'] result end # Returns a hash, that will be turned into a JSON object and represent this # object. def as_json(*) { JSON.create_id => self.class.name, 'm' => message, 'b' => backtrace, } end # Stores class name (Exception) with message m and backtrace array # b as JSON string def to_json(*args) as_json.to_json(*args) end end ruby-json-2.1.0+dfsg.orig/lib/json/add/symbol.rb0000644000175000017500000000114613113111601020765 0ustar boutilboutil#frozen_string_literal: false unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED require 'json' end class Symbol # Returns a hash, that will be turned into a JSON object and represent this # object. def as_json(*) { JSON.create_id => self.class.name, 's' => to_s, } end # Stores class name (Symbol) with String representation of Symbol as a JSON string. 
def to_json(*a) as_json.to_json(*a) end # Deserializes JSON string by converting the string value stored in the object to a Symbol def self.json_create(o) o['s'].to_sym end end ruby-json-2.1.0+dfsg.orig/lib/json/add/regexp.rb0000644000175000017500000000137013113111601020751 0ustar boutilboutil#frozen_string_literal: false unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED require 'json' end class Regexp # Deserializes JSON string by constructing new Regexp object with source # s (Regexp or String) and options o serialized by # to_json def self.json_create(object) new(object['s'], object['o']) end # Returns a hash, that will be turned into a JSON object and represent this # object. def as_json(*) { JSON.create_id => self.class.name, 'o' => options, 's' => source, } end # Stores class name (Regexp) with options o and source s # (Regexp or String) as JSON string def to_json(*) as_json.to_json end end ruby-json-2.1.0+dfsg.orig/lib/json/add/date_time.rb0000644000175000017500000000246013113111601021413 0ustar boutilboutil#frozen_string_literal: false unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED require 'json' end require 'date' class DateTime # Deserializes JSON string by converting year y, month m, # day d, hour H, minute M, second S, # offset of and Day of Calendar Reform sg to DateTime. def self.json_create(object) args = object.values_at('y', 'm', 'd', 'H', 'M', 'S') of_a, of_b = object['of'].split('/') if of_b and of_b != '0' args << Rational(of_a.to_i, of_b.to_i) else args << of_a end args << object['sg'] civil(*args) end alias start sg unless method_defined?(:start) # Returns a hash, that will be turned into a JSON object and represent this # object. 
def as_json(*) { JSON.create_id => self.class.name, 'y' => year, 'm' => month, 'd' => day, 'H' => hour, 'M' => min, 'S' => sec, 'of' => offset.to_s, 'sg' => start, } end # Stores class name (DateTime) with Julian year y, month m, # day d, hour H, minute M, second S, # offset of and Day of Calendar Reform sg as JSON string def to_json(*args) as_json.to_json(*args) end end ruby-json-2.1.0+dfsg.orig/lib/json/add/range.rb0000644000175000017500000000142213113111601020551 0ustar boutilboutil#frozen_string_literal: false unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED require 'json' end class Range # Deserializes JSON string by constructing new Range object with arguments # a serialized by to_json. def self.json_create(object) new(*object['a']) end # Returns a hash, that will be turned into a JSON object and represent this # object. def as_json(*) { JSON.create_id => self.class.name, 'a' => [ first, last, exclude_end? ] } end # Stores class name (Range) with JSON array of arguments a which # include first (integer), last (integer), and # exclude_end? (boolean) as JSON string. def to_json(*args) as_json.to_json(*args) end end ruby-json-2.1.0+dfsg.orig/lib/json/add/struct.rb0000644000175000017500000000142213113111601021001 0ustar boutilboutil#frozen_string_literal: false unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED require 'json' end class Struct # Deserializes JSON string by constructing new Struct object with values # v serialized by to_json. def self.json_create(object) new(*object['v']) end # Returns a hash, that will be turned into a JSON object and represent this # object. def as_json(*) klass = self.class.name klass.to_s.empty? and raise JSON::JSONError, "Only named structs are supported!" { JSON.create_id => klass, 'v' => values, } end # Stores class name (Struct) with Struct values v as a JSON string. # Only named structs are supported. 
def to_json(*args) as_json.to_json(*args) end end ruby-json-2.1.0+dfsg.orig/lib/json/add/bigdecimal.rb0000644000175000017500000000112613113111601021536 0ustar boutilboutil#frozen_string_literal: false unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED require 'json' end defined?(::BigDecimal) or require 'bigdecimal' class BigDecimal # Import a JSON Marshalled object. # # method used for JSON marshalling support. def self.json_create(object) BigDecimal._load object['b'] end # Marshal the object to JSON. # # method used for JSON marshalling support. def as_json(*) { JSON.create_id => self.class.name, 'b' => _dump, } end # return the JSON value def to_json(*) as_json.to_json end end ruby-json-2.1.0+dfsg.orig/lib/json/add/ostruct.rb0000644000175000017500000000143513113111601021164 0ustar boutilboutil#frozen_string_literal: false unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED require 'json' end require 'ostruct' class OpenStruct # Deserializes JSON string by constructing new Struct object with values # t serialized by to_json. def self.json_create(object) new(object['t'] || object[:t]) end # Returns a hash, that will be turned into a JSON object and represent this # object. def as_json(*) klass = self.class.name klass.to_s.empty? and raise JSON::JSONError, "Only named structs are supported!" { JSON.create_id => klass, 't' => table, } end # Stores class name (OpenStruct) with this struct's values v as a # JSON string. 
def to_json(*args) as_json.to_json(*args) end end ruby-json-2.1.0+dfsg.orig/lib/json/add/time.rb0000644000175000017500000000177013113111601020421 0ustar boutilboutil#frozen_string_literal: false unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED require 'json' end class Time # Deserializes JSON string by converting time since epoch to Time def self.json_create(object) if usec = object.delete('u') # used to be tv_usec -> tv_nsec object['n'] = usec * 1000 end if method_defined?(:tv_nsec) at(object['s'], Rational(object['n'], 1000)) else at(object['s'], object['n'] / 1000) end end # Returns a hash, that will be turned into a JSON object and represent this # object. def as_json(*) nanoseconds = [ tv_usec * 1000 ] respond_to?(:tv_nsec) and nanoseconds << tv_nsec nanoseconds = nanoseconds.max { JSON.create_id => self.class.name, 's' => tv_sec, 'n' => nanoseconds, } end # Stores class name (Time) with number of seconds since epoch and number of # microseconds for Time as JSON string def to_json(*args) as_json.to_json(*args) end end ruby-json-2.1.0+dfsg.orig/lib/json/add/date.rb0000644000175000017500000000156113113111601020376 0ustar boutilboutil#frozen_string_literal: false unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED require 'json' end require 'date' class Date # Deserializes JSON string by converting Julian year y, month # m, day d and Day of Calendar Reform sg to Date. def self.json_create(object) civil(*object.values_at('y', 'm', 'd', 'sg')) end alias start sg unless method_defined?(:start) # Returns a hash, that will be turned into a JSON object and represent this # object. 
def as_json(*) { JSON.create_id => self.class.name, 'y' => year, 'm' => month, 'd' => day, 'sg' => start, } end # Stores class name (Date) with Julian year y, month m, day # d and Day of Calendar Reform sg as JSON string def to_json(*args) as_json.to_json(*args) end end ruby-json-2.1.0+dfsg.orig/lib/json/add/core.rb0000644000175000017500000000053413113111601020410 0ustar boutilboutil#frozen_string_literal: false # This file requires the implementations of ruby core's custom objects for # serialisation/deserialisation. require 'json/add/date' require 'json/add/date_time' require 'json/add/exception' require 'json/add/range' require 'json/add/regexp' require 'json/add/struct' require 'json/add/symbol' require 'json/add/time' ruby-json-2.1.0+dfsg.orig/lib/json/pure/0000755000175000017500000000000013113111601017354 5ustar boutilboutilruby-json-2.1.0+dfsg.orig/lib/json/pure/parser.rb0000644000175000017500000002453213113111601021203 0ustar boutilboutil#frozen_string_literal: false require 'strscan' module JSON module Pure # This class implements the JSON parser that is used to parse a JSON string # into a Ruby data structure. class Parser < StringScanner STRING = /" ((?:[^\x0-\x1f"\\] | # escaped special characters: \\["\\\/bfnrt] | \\u[0-9a-fA-F]{4} | # match all but escaped special characters: \\[\x20-\x21\x23-\x2e\x30-\x5b\x5d-\x61\x63-\x65\x67-\x6d\x6f-\x71\x73\x75-\xff])*) "/nx INTEGER = /(-?0|-?[1-9]\d*)/ FLOAT = /(-? 
(?:0|[1-9]\d*) (?: \.\d+(?i:e[+-]?\d+) | \.\d+ | (?i:e[+-]?\d+) ) )/x NAN = /NaN/ INFINITY = /Infinity/ MINUS_INFINITY = /-Infinity/ OBJECT_OPEN = /\{/ OBJECT_CLOSE = /\}/ ARRAY_OPEN = /\[/ ARRAY_CLOSE = /\]/ PAIR_DELIMITER = /:/ COLLECTION_DELIMITER = /,/ TRUE = /true/ FALSE = /false/ NULL = /null/ IGNORE = %r( (?: //[^\n\r]*[\n\r]| # line comments /\* # c-style comments (?: [^*/]| # normal chars /[^*]| # slashes that do not start a nested comment \*[^/]| # asterisks that do not end this comment /(?=\*/) # single slash before this comment's end )* \*/ # the End of this comment |[ \t\r\n]+ # whitespaces: space, horicontal tab, lf, cr )+ )mx UNPARSED = Object.new.freeze # Creates a new JSON::Pure::Parser instance for the string _source_. # # It will be configured by the _opts_ hash. _opts_ can have the following # keys: # * *max_nesting*: The maximum depth of nesting allowed in the parsed data # structures. Disable depth checking with :max_nesting => false|nil|0, # it defaults to 100. # * *allow_nan*: If set to true, allow NaN, Infinity and -Infinity in # defiance of RFC 7159 to be parsed by the Parser. This option defaults # to false. # * *symbolize_names*: If set to true, returns symbols for the names # (keys) in a JSON object. Otherwise strings are returned, which is # also the default. It's not possible to use this option in # conjunction with the *create_additions* option. # * *create_additions*: If set to true, the Parser creates # additions when if a matching class and create_id was found. This # option defaults to false. # * *object_class*: Defaults to Hash # * *array_class*: Defaults to Array # * *decimal_class*: Specifies which class to use instead of the default # (Float) when parsing decimal numbers. This class must accept a single # string argument in its constructor. 
def initialize(source, opts = {}) opts ||= {} source = convert_encoding source super source if !opts.key?(:max_nesting) # defaults to 100 @max_nesting = 100 elsif opts[:max_nesting] @max_nesting = opts[:max_nesting] else @max_nesting = 0 end @allow_nan = !!opts[:allow_nan] @symbolize_names = !!opts[:symbolize_names] if opts.key?(:create_additions) @create_additions = !!opts[:create_additions] else @create_additions = false end @symbolize_names && @create_additions and raise ArgumentError, 'options :symbolize_names and :create_additions cannot be used '\ 'in conjunction' @create_id = @create_additions ? JSON.create_id : nil @object_class = opts[:object_class] || Hash @array_class = opts[:array_class] || Array @decimal_class = opts[:decimal_class] @match_string = opts[:match_string] end alias source string def reset super @current_nesting = 0 end # Parses the current JSON string _source_ and returns the # complete data structure as a result. def parse reset obj = nil while !eos? && skip(IGNORE) do end if eos? raise ParserError, "source is not valid JSON!" else obj = parse_value UNPARSED.equal?(obj) and raise ParserError, "source is not valid JSON!" end while !eos? && skip(IGNORE) do end eos? or raise ParserError, "source is not valid JSON!" obj end private def convert_encoding(source) if source.respond_to?(:to_str) source = source.to_str else raise TypeError, "#{source.inspect} is not like a string" end if source.encoding != ::Encoding::ASCII_8BIT source = source.encode(::Encoding::UTF_8) source.force_encoding(::Encoding::ASCII_8BIT) end source end # Unescape characters in strings. UNESCAPE_MAP = Hash.new { |h, k| h[k] = k.chr } UNESCAPE_MAP.update({ ?" => '"', ?\\ => '\\', ?/ => '/', ?b => "\b", ?f => "\f", ?n => "\n", ?r => "\r", ?t => "\t", ?u => nil, }) EMPTY_8BIT_STRING = '' if ::String.method_defined?(:encode) EMPTY_8BIT_STRING.force_encoding Encoding::ASCII_8BIT end def parse_string if scan(STRING) return '' if self[1].empty? 
string = self[1].gsub(%r((?:\\[\\bfnrt"/]|(?:\\u(?:[A-Fa-f\d]{4}))+|\\[\x20-\xff]))n) do |c| if u = UNESCAPE_MAP[$&[1]] u else # \uXXXX bytes = EMPTY_8BIT_STRING.dup i = 0 while c[6 * i] == ?\\ && c[6 * i + 1] == ?u bytes << c[6 * i + 2, 2].to_i(16) << c[6 * i + 4, 2].to_i(16) i += 1 end JSON.iconv('utf-8', 'utf-16be', bytes) end end if string.respond_to?(:force_encoding) string.force_encoding(::Encoding::UTF_8) end if @create_additions and @match_string for (regexp, klass) in @match_string klass.json_creatable? or next string =~ regexp and return klass.json_create(string) end end string else UNPARSED end rescue => e raise ParserError, "Caught #{e.class} at '#{peek(20)}': #{e}" end def parse_value case when scan(FLOAT) @decimal_class && @decimal_class.new(self[1]) || Float(self[1]) when scan(INTEGER) Integer(self[1]) when scan(TRUE) true when scan(FALSE) false when scan(NULL) nil when !UNPARSED.equal?(string = parse_string) string when scan(ARRAY_OPEN) @current_nesting += 1 ary = parse_array @current_nesting -= 1 ary when scan(OBJECT_OPEN) @current_nesting += 1 obj = parse_object @current_nesting -= 1 obj when @allow_nan && scan(NAN) NaN when @allow_nan && scan(INFINITY) Infinity when @allow_nan && scan(MINUS_INFINITY) MinusInfinity else UNPARSED end end def parse_array raise NestingError, "nesting of #@current_nesting is too deep" if @max_nesting.nonzero? && @current_nesting > @max_nesting result = @array_class.new delim = false until eos? case when !UNPARSED.equal?(value = parse_value) delim = false result << value skip(IGNORE) if scan(COLLECTION_DELIMITER) delim = true elsif match?(ARRAY_CLOSE) ; else raise ParserError, "expected ',' or ']' in array at '#{peek(20)}'!" end when scan(ARRAY_CLOSE) if delim raise ParserError, "expected next element in array at '#{peek(20)}'!" end break when skip(IGNORE) ; else raise ParserError, "unexpected token in array at '#{peek(20)}'!" 
end end result end def parse_object raise NestingError, "nesting of #@current_nesting is too deep" if @max_nesting.nonzero? && @current_nesting > @max_nesting result = @object_class.new delim = false until eos? case when !UNPARSED.equal?(string = parse_string) skip(IGNORE) unless scan(PAIR_DELIMITER) raise ParserError, "expected ':' in object at '#{peek(20)}'!" end skip(IGNORE) unless UNPARSED.equal?(value = parse_value) result[@symbolize_names ? string.to_sym : string] = value delim = false skip(IGNORE) if scan(COLLECTION_DELIMITER) delim = true elsif match?(OBJECT_CLOSE) ; else raise ParserError, "expected ',' or '}' in object at '#{peek(20)}'!" end else raise ParserError, "expected value in object at '#{peek(20)}'!" end when scan(OBJECT_CLOSE) if delim raise ParserError, "expected next name, value pair in object at '#{peek(20)}'!" end if @create_additions and klassname = result[@create_id] klass = JSON.deep_const_get klassname break unless klass and klass.json_creatable? result = klass.json_create(result) end break when skip(IGNORE) ; else raise ParserError, "unexpected token in object at '#{peek(20)}'!" 
end end result end end end end ruby-json-2.1.0+dfsg.orig/lib/json/pure/generator.rb0000644000175000017500000003532713113111601021701 0ustar boutilboutil#frozen_string_literal: false module JSON MAP = { "\x0" => '\u0000', "\x1" => '\u0001', "\x2" => '\u0002', "\x3" => '\u0003', "\x4" => '\u0004', "\x5" => '\u0005', "\x6" => '\u0006', "\x7" => '\u0007', "\b" => '\b', "\t" => '\t', "\n" => '\n', "\xb" => '\u000b', "\f" => '\f', "\r" => '\r', "\xe" => '\u000e', "\xf" => '\u000f', "\x10" => '\u0010', "\x11" => '\u0011', "\x12" => '\u0012', "\x13" => '\u0013', "\x14" => '\u0014', "\x15" => '\u0015', "\x16" => '\u0016', "\x17" => '\u0017', "\x18" => '\u0018', "\x19" => '\u0019', "\x1a" => '\u001a', "\x1b" => '\u001b', "\x1c" => '\u001c', "\x1d" => '\u001d', "\x1e" => '\u001e', "\x1f" => '\u001f', '"' => '\"', '\\' => '\\\\', } # :nodoc: # Convert a UTF8 encoded Ruby string _string_ to a JSON string, encoded with # UTF16 big endian characters as \u????, and return it. def utf8_to_json(string) # :nodoc: string = string.dup string.force_encoding(::Encoding::ASCII_8BIT) string.gsub!(/["\\\x0-\x1f]/) { MAP[$&] } string.force_encoding(::Encoding::UTF_8) string end def utf8_to_json_ascii(string) # :nodoc: string = string.dup string.force_encoding(::Encoding::ASCII_8BIT) string.gsub!(/["\\\x0-\x1f]/n) { MAP[$&] } string.gsub!(/( (?: [\xc2-\xdf][\x80-\xbf] | [\xe0-\xef][\x80-\xbf]{2} | [\xf0-\xf4][\x80-\xbf]{3} )+ | [\x80-\xc1\xf5-\xff] # invalid )/nx) { |c| c.size == 1 and raise GeneratorError, "invalid utf8 byte: '#{c}'" s = JSON.iconv('utf-16be', 'utf-8', c).unpack('H*')[0] s.force_encoding(::Encoding::ASCII_8BIT) s.gsub!(/.{4}/n, '\\\\u\&') s.force_encoding(::Encoding::UTF_8) } string.force_encoding(::Encoding::UTF_8) string rescue => e raise GeneratorError.wrap(e) end def valid_utf8?(string) encoding = string.encoding (encoding == Encoding::UTF_8 || encoding == Encoding::ASCII) && string.valid_encoding? end module_function :utf8_to_json, :utf8_to_json_ascii, :valid_utf8? 
module Pure module Generator # This class is used to create State instances, that are use to hold data # while generating a JSON text from a Ruby data structure. class State # Creates a State object from _opts_, which ought to be Hash to create # a new State instance configured by _opts_, something else to create # an unconfigured instance. If _opts_ is a State object, it is just # returned. def self.from_state(opts) case when self === opts opts when opts.respond_to?(:to_hash) new(opts.to_hash) when opts.respond_to?(:to_h) new(opts.to_h) else SAFE_STATE_PROTOTYPE.dup end end # Instantiates a new State object, configured by _opts_. # # _opts_ can have the following keys: # # * *indent*: a string used to indent levels (default: ''), # * *space*: a string that is put after, a : or , delimiter (default: ''), # * *space_before*: a string that is put before a : pair delimiter (default: ''), # * *object_nl*: a string that is put at the end of a JSON object (default: ''), # * *array_nl*: a string that is put at the end of a JSON array (default: ''), # * *check_circular*: is deprecated now, use the :max_nesting option instead, # * *max_nesting*: sets the maximum level of data structure nesting in # the generated JSON, max_nesting = 0 if no maximum should be checked. # * *allow_nan*: true if NaN, Infinity, and -Infinity should be # generated, otherwise an exception is thrown, if these values are # encountered. This options defaults to false. def initialize(opts = {}) @indent = '' @space = '' @space_before = '' @object_nl = '' @array_nl = '' @allow_nan = false @ascii_only = false @buffer_initial_length = 1024 configure opts end # This string is used to indent levels in the JSON text. attr_accessor :indent # This string is used to insert a space between the tokens in a JSON # string. attr_accessor :space # This string is used to insert a space before the ':' in JSON objects. 
attr_accessor :space_before # This string is put at the end of a line that holds a JSON object (or # Hash). attr_accessor :object_nl # This string is put at the end of a line that holds a JSON array. attr_accessor :array_nl # This integer returns the maximum level of data structure nesting in # the generated JSON, max_nesting = 0 if no maximum is checked. attr_accessor :max_nesting # :stopdoc: attr_reader :buffer_initial_length def buffer_initial_length=(length) if length > 0 @buffer_initial_length = length end end # :startdoc: # This integer returns the current depth data structure nesting in the # generated JSON. attr_accessor :depth def check_max_nesting # :nodoc: return if @max_nesting.zero? current_nesting = depth + 1 current_nesting > @max_nesting and raise NestingError, "nesting of #{current_nesting} is too deep" end # Returns true, if circular data structures are checked, # otherwise returns false. def check_circular? !@max_nesting.zero? end # Returns true if NaN, Infinity, and -Infinity should be considered as # valid JSON and output. def allow_nan? @allow_nan end # Returns true, if only ASCII characters should be generated. Otherwise # returns false. def ascii_only? @ascii_only end # Configure this State instance with the Hash _opts_, and return # itself. 
def configure(opts) if opts.respond_to?(:to_hash) opts = opts.to_hash elsif opts.respond_to?(:to_h) opts = opts.to_h else raise TypeError, "can't convert #{opts.class} into Hash" end for key, value in opts instance_variable_set "@#{key}", value end @indent = opts[:indent] if opts.key?(:indent) @space = opts[:space] if opts.key?(:space) @space_before = opts[:space_before] if opts.key?(:space_before) @object_nl = opts[:object_nl] if opts.key?(:object_nl) @array_nl = opts[:array_nl] if opts.key?(:array_nl) @allow_nan = !!opts[:allow_nan] if opts.key?(:allow_nan) @ascii_only = opts[:ascii_only] if opts.key?(:ascii_only) @depth = opts[:depth] || 0 @buffer_initial_length ||= opts[:buffer_initial_length] if !opts.key?(:max_nesting) # defaults to 100 @max_nesting = 100 elsif opts[:max_nesting] @max_nesting = opts[:max_nesting] else @max_nesting = 0 end self end alias merge configure # Returns the configuration instance variables as a hash, that can be # passed to the configure method. def to_h result = {} for iv in instance_variables iv = iv.to_s[1..-1] result[iv.to_sym] = self[iv] end result end alias to_hash to_h # Generates a valid JSON document from object +obj+ and # returns the result. If no valid JSON document can be # created this method raises a # GeneratorError exception. def generate(obj) result = obj.to_json(self) JSON.valid_utf8?(result) or raise GeneratorError, "source sequence #{result.inspect} is illegal/malformed utf-8" result end # Return the value returned by method +name+. def [](name) if respond_to?(name) __send__(name) else instance_variable_get("@#{name}") end end def []=(name, value) if respond_to?(name_writer = "#{name}=") __send__ name_writer, value else instance_variable_set "@#{name}", value end end end module GeneratorMethods module Object # Converts this object to a string (calling #to_s), converts # it to a JSON string, and returns the result. This is a fallback, if no # special method #to_json was defined for some object. 
def to_json(*) to_s.to_json end end module Hash # Returns a JSON string containing a JSON object, that is unparsed from # this Hash instance. # _state_ is a JSON::State object, that can also be used to configure the # produced JSON string output further. # _depth_ is used to find out nesting depth, to indent accordingly. def to_json(state = nil, *) state = State.from_state(state) state.check_max_nesting json_transform(state) end private def json_shift(state) state.object_nl.empty? or return '' state.indent * state.depth end def json_transform(state) delim = ',' delim << state.object_nl result = '{' result << state.object_nl depth = state.depth += 1 first = true indent = !state.object_nl.empty? each { |key,value| result << delim unless first result << state.indent * depth if indent result << key.to_s.to_json(state) result << state.space_before result << ':' result << state.space if value.respond_to?(:to_json) result << value.to_json(state) else result << %{"#{String(value)}"} end first = false } depth = state.depth -= 1 result << state.object_nl result << state.indent * depth if indent result << '}' result end end module Array # Returns a JSON string containing a JSON array, that is unparsed from # this Array instance. # _state_ is a JSON::State object, that can also be used to configure the # produced JSON string output further. def to_json(state = nil, *) state = State.from_state(state) state.check_max_nesting json_transform(state) end private def json_transform(state) delim = ',' delim << state.array_nl result = '[' result << state.array_nl depth = state.depth += 1 first = true indent = !state.array_nl.empty? 
each { |value| result << delim unless first result << state.indent * depth if indent if value.respond_to?(:to_json) result << value.to_json(state) else result << %{"#{String(value)}"} end first = false } depth = state.depth -= 1 result << state.array_nl result << state.indent * depth if indent result << ']' end end module Integer # Returns a JSON string representation for this Integer number. def to_json(*) to_s end end module Float # Returns a JSON string representation for this Float number. def to_json(state = nil, *) state = State.from_state(state) case when infinite? if state.allow_nan? to_s else raise GeneratorError, "#{self} not allowed in JSON" end when nan? if state.allow_nan? to_s else raise GeneratorError, "#{self} not allowed in JSON" end else to_s end end end module String # This string should be encoded with UTF-8 A call to this method # returns a JSON string encoded with UTF16 big endian characters as # \u????. def to_json(state = nil, *args) state = State.from_state(state) if encoding == ::Encoding::UTF_8 string = self else string = encode(::Encoding::UTF_8) end if state.ascii_only? '"' << JSON.utf8_to_json_ascii(string) << '"' else '"' << JSON.utf8_to_json(string) << '"' end end # Module that holds the extinding methods if, the String module is # included. module Extend # Raw Strings are JSON Objects (the raw bytes are stored in an # array for the key "raw"). The Ruby String can be created by this # module method. def json_create(o) o['raw'].pack('C*') end end # Extends _modul_ with the String::Extend module. def self.included(modul) modul.extend Extend end # This method creates a raw object hash, that can be nested into # other data structures and will be unparsed as a raw string. This # method should be used, if you want to convert raw strings to JSON # instead of UTF-8 strings, e. g. binary data. 
def to_json_raw_object { JSON.create_id => self.class.name, 'raw' => self.unpack('C*'), } end # This method creates a JSON text from the result of # a call to to_json_raw_object of this String. def to_json_raw(*args) to_json_raw_object.to_json(*args) end end module TrueClass # Returns a JSON string for true: 'true'. def to_json(*) 'true' end end module FalseClass # Returns a JSON string for false: 'false'. def to_json(*) 'false' end end module NilClass # Returns a JSON string for nil: 'null'. def to_json(*) 'null' end end end end end end ruby-json-2.1.0+dfsg.orig/lib/json/common.rb0000644000175000017500000003575413113111601020234 0ustar boutilboutil#frozen_string_literal: false require 'json/version' require 'json/generic_object' module JSON class << self # If _object_ is string-like, parse the string and return the parsed # result as a Ruby data structure. Otherwise generate a JSON text from the # Ruby data structure object and return it. # # The _opts_ argument is passed through to generate/parse respectively. # See generate and parse for their documentation. def [](object, opts = {}) if object.respond_to? :to_str JSON.parse(object.to_str, opts) else JSON.generate(object, opts) end end # Returns the JSON parser class that is used by JSON. This is either # JSON::Ext::Parser or JSON::Pure::Parser. attr_reader :parser # Set the JSON parser class _parser_ to be used by JSON. def parser=(parser) # :nodoc: @parser = parser remove_const :Parser if const_defined?(:Parser, false) const_set :Parser, parser end # Return the constant located at _path_. The format of _path_ has to be # either ::A::B::C or A::B::C. In any case, A has to be located at the top # level (absolute namespace path?). If there doesn't exist a constant at # the given path, an ArgumentError is raised. def deep_const_get(path) # :nodoc: path.to_s.split(/::/).inject(Object) do |p, c| case when c.empty? 
then p when p.const_defined?(c, true) then p.const_get(c) else begin p.const_missing(c) rescue NameError => e raise ArgumentError, "can't get const #{path}: #{e}" end end end end # Set the module _generator_ to be used by JSON. def generator=(generator) # :nodoc: old, $VERBOSE = $VERBOSE, nil @generator = generator generator_methods = generator::GeneratorMethods for const in generator_methods.constants klass = deep_const_get(const) modul = generator_methods.const_get(const) klass.class_eval do instance_methods(false).each do |m| m.to_s == 'to_json' and remove_method m end include modul end end self.state = generator::State const_set :State, self.state const_set :SAFE_STATE_PROTOTYPE, State.new const_set :FAST_STATE_PROTOTYPE, State.new( :indent => '', :space => '', :object_nl => "", :array_nl => "", :max_nesting => false ) const_set :PRETTY_STATE_PROTOTYPE, State.new( :indent => ' ', :space => ' ', :object_nl => "\n", :array_nl => "\n" ) ensure $VERBOSE = old end # Returns the JSON generator module that is used by JSON. This is # either JSON::Ext::Generator or JSON::Pure::Generator. attr_reader :generator # Returns the JSON generator state class that is used by JSON. This is # either JSON::Ext::Generator::State or JSON::Pure::Generator::State. attr_accessor :state # This is create identifier, which is used to decide if the _json_create_ # hook of a class should be called. It defaults to 'json_class'. attr_accessor :create_id end self.create_id = 'json_class' NaN = 0.0/0 Infinity = 1.0/0 MinusInfinity = -Infinity # The base exception for JSON errors. class JSONError < StandardError def self.wrap(exception) obj = new("Wrapped(#{exception.class}): #{exception.message.inspect}") obj.set_backtrace exception.backtrace obj end end # This exception is raised if a parser error occurs. class ParserError < JSONError; end # This exception is raised if the nesting of parsed data structures is too # deep. 
class NestingError < ParserError; end # :stopdoc: class CircularDatastructure < NestingError; end # :startdoc: # This exception is raised if a generator or unparser error occurs. class GeneratorError < JSONError; end # For backwards compatibility UnparserError = GeneratorError # This exception is raised if the required unicode support is missing on the # system. Usually this means that the iconv library is not installed. class MissingUnicodeSupport < JSONError; end module_function # Parse the JSON document _source_ into a Ruby data structure and return it. # # _opts_ can have the following # keys: # * *max_nesting*: The maximum depth of nesting allowed in the parsed data # structures. Disable depth checking with :max_nesting => false. It # defaults to 100. # * *allow_nan*: If set to true, allow NaN, Infinity and -Infinity in # defiance of RFC 7159 to be parsed by the Parser. This option defaults # to false. # * *symbolize_names*: If set to true, returns symbols for the names # (keys) in a JSON object. Otherwise strings are returned. Strings are # the default. # * *create_additions*: If set to false, the Parser doesn't create # additions even if a matching class and create_id was found. This option # defaults to false. # * *object_class*: Defaults to Hash # * *array_class*: Defaults to Array def parse(source, opts = {}) Parser.new(source, opts).parse end # Parse the JSON document _source_ into a Ruby data structure and return it. # The bang version of the parse method defaults to the more dangerous values # for the _opts_ hash, so be sure only to parse trusted _source_ documents. # # _opts_ can have the following keys: # * *max_nesting*: The maximum depth of nesting allowed in the parsed data # structures. Enable depth checking with :max_nesting => anInteger. The # parse! methods defaults to not doing max depth checking: This can be # dangerous if someone wants to fill up your stack. 
# * *allow_nan*: If set to true, allow NaN, Infinity, and -Infinity in # defiance of RFC 7159 to be parsed by the Parser. This option defaults # to true. # * *create_additions*: If set to false, the Parser doesn't create # additions even if a matching class and create_id was found. This option # defaults to false. def parse!(source, opts = {}) opts = { :max_nesting => false, :allow_nan => true }.merge(opts) Parser.new(source, opts).parse end # Generate a JSON document from the Ruby data structure _obj_ and return # it. _state_ is * a JSON::State object, # * or a Hash like object (responding to to_hash), # * an object convertible into a hash by a to_h method, # that is used as or to configure a State object. # # It defaults to a state object, that creates the shortest possible JSON text # in one line, checks for circular data structures and doesn't allow NaN, # Infinity, and -Infinity. # # A _state_ hash can have the following keys: # * *indent*: a string used to indent levels (default: ''), # * *space*: a string that is put after, a : or , delimiter (default: ''), # * *space_before*: a string that is put before a : pair delimiter (default: ''), # * *object_nl*: a string that is put at the end of a JSON object (default: ''), # * *array_nl*: a string that is put at the end of a JSON array (default: ''), # * *allow_nan*: true if NaN, Infinity, and -Infinity should be # generated, otherwise an exception is thrown if these values are # encountered. This options defaults to false. # * *max_nesting*: The maximum depth of nesting allowed in the data # structures from which JSON is to be generated. Disable depth checking # with :max_nesting => false, it defaults to 100. # # See also the fast_generate for the fastest creation method with the least # amount of sanity checks, and the pretty_generate method for some # defaults for pretty output. 
def generate(obj, opts = nil) if State === opts state, opts = opts, nil else state = SAFE_STATE_PROTOTYPE.dup end if opts if opts.respond_to? :to_hash opts = opts.to_hash elsif opts.respond_to? :to_h opts = opts.to_h else raise TypeError, "can't convert #{opts.class} into Hash" end state = state.configure(opts) end state.generate(obj) end # :stopdoc: # I want to deprecate these later, so I'll first be silent about them, and # later delete them. alias unparse generate module_function :unparse # :startdoc: # Generate a JSON document from the Ruby data structure _obj_ and return it. # This method disables the checks for circles in Ruby objects. # # *WARNING*: Be careful not to pass any Ruby data structures with circles as # _obj_ argument because this will cause JSON to go into an infinite loop. def fast_generate(obj, opts = nil) if State === opts state, opts = opts, nil else state = FAST_STATE_PROTOTYPE.dup end if opts if opts.respond_to? :to_hash opts = opts.to_hash elsif opts.respond_to? :to_h opts = opts.to_h else raise TypeError, "can't convert #{opts.class} into Hash" end state.configure(opts) end state.generate(obj) end # :stopdoc: # I want to deprecate these later, so I'll first be silent about them, and later delete them. alias fast_unparse fast_generate module_function :fast_unparse # :startdoc: # Generate a JSON document from the Ruby data structure _obj_ and return it. # The returned document is a prettier form of the document returned by # #unparse. # # The _opts_ argument can be used to configure the generator. See the # generate method for a more detailed explanation. def pretty_generate(obj, opts = nil) if State === opts state, opts = opts, nil else state = PRETTY_STATE_PROTOTYPE.dup end if opts if opts.respond_to? :to_hash opts = opts.to_hash elsif opts.respond_to? 
:to_h opts = opts.to_h else raise TypeError, "can't convert #{opts.class} into Hash" end state.configure(opts) end state.generate(obj) end # :stopdoc: # I want to deprecate these later, so I'll first be silent about them, and later delete them. alias pretty_unparse pretty_generate module_function :pretty_unparse # :startdoc: class << self # The global default options for the JSON.load method: # :max_nesting: false # :allow_nan: true # :allow_blank: true attr_accessor :load_default_options end self.load_default_options = { :max_nesting => false, :allow_nan => true, :allow_blank => true, :create_additions => true, } # Load a ruby data structure from a JSON _source_ and return it. A source can # either be a string-like object, an IO-like object, or an object responding # to the read method. If _proc_ was given, it will be called with any nested # Ruby object as an argument recursively in depth first order. To modify the # default options pass in the optional _options_ argument as well. # # BEWARE: This method is meant to serialise data from trusted user input, # like from your own database server or clients under your control, it could # be dangerous to allow untrusted users to pass JSON sources into it. The # default options for the parser can be changed via the load_default_options # method. # # This method is part of the implementation of the load/dump interface of # Marshal and YAML. def load(source, proc = nil, options = {}) opts = load_default_options.merge options if source.respond_to? :to_str source = source.to_str elsif source.respond_to? :to_io source = source.to_io.read elsif source.respond_to?(:read) source = source.read end if opts[:allow_blank] && (source.nil? || source.empty?) 
source = 'null' end result = parse(source, opts) recurse_proc(result, &proc) if proc result end # Recursively calls passed _Proc_ if the parsed data structure is an _Array_ or _Hash_ def recurse_proc(result, &proc) case result when Array result.each { |x| recurse_proc x, &proc } proc.call result when Hash result.each { |x, y| recurse_proc x, &proc; recurse_proc y, &proc } proc.call result else proc.call result end end alias restore load module_function :restore class << self # The global default options for the JSON.dump method: # :max_nesting: false # :allow_nan: true # :allow_blank: true attr_accessor :dump_default_options end self.dump_default_options = { :max_nesting => false, :allow_nan => true, } # Dumps _obj_ as a JSON string, i.e. calls generate on the object and returns # the result. # # If anIO (an IO-like object or an object that responds to the write method) # was given, the resulting JSON is written to it. # # If the number of nested arrays or objects exceeds _limit_, an ArgumentError # exception is raised. This argument is similar (but not exactly the # same!) to the _limit_ argument in Marshal.dump. # # The default options for the generator can be changed via the # dump_default_options method. # # This method is part of the implementation of the load/dump interface of # Marshal and YAML. def dump(obj, anIO = nil, limit = nil) if anIO and limit.nil? anIO = anIO.to_io if anIO.respond_to?(:to_io) unless anIO.respond_to?(:write) limit = anIO anIO = nil end end opts = JSON.dump_default_options opts = opts.merge(:max_nesting => limit) if limit result = generate(obj, opts) if anIO anIO.write result anIO else result end rescue JSON::NestingError raise ArgumentError, "exceed depth limit" end # Encodes string using Ruby's _String.encode_ def self.iconv(to, from, string) string.encode(to, from) end end module ::Kernel private # Outputs _objs_ to STDOUT as JSON strings in the shortest form, that is in # one line. 
def j(*objs) objs.each do |obj| puts JSON::generate(obj, :allow_nan => true, :max_nesting => false) end nil end # Outputs _objs_ to STDOUT as JSON strings in a pretty format, with # indentation and over many lines. def jj(*objs) objs.each do |obj| puts JSON::pretty_generate(obj, :allow_nan => true, :max_nesting => false) end nil end # If _object_ is string-like, parse the string and return the parsed result as # a Ruby data structure. Otherwise, generate a JSON text from the Ruby data # structure object and return it. # # The _opts_ argument is passed through to generate/parse respectively. See # generate and parse for their documentation. def JSON(object, *args) if object.respond_to? :to_str JSON.parse(object.to_str, args.first) else JSON.generate(object, args.first) end end end # Extends any Class to include _json_creatable?_ method. class ::Class # Returns true if this class can be used to create an instance # from a serialised JSON string. The class has to implement a class # method _json_create_ that expects a hash as first parameter. The hash # should include the required data. def json_creatable? respond_to?(:json_create) end end ruby-json-2.1.0+dfsg.orig/lib/json/version.rb0000644000175000017500000000045613113111601020420 0ustar boutilboutil# frozen_string_literal: false module JSON # JSON version VERSION = '2.1.0' VERSION_ARRAY = VERSION.split(/\./).map { |x| x.to_i } # :nodoc: VERSION_MAJOR = VERSION_ARRAY[0] # :nodoc: VERSION_MINOR = VERSION_ARRAY[1] # :nodoc: VERSION_BUILD = VERSION_ARRAY[2] # :nodoc: end ruby-json-2.1.0+dfsg.orig/lib/json/pure.rb0000644000175000017500000000060613113111601017703 0ustar boutilboutilrequire 'json/common' module JSON # This module holds all the modules/classes that implement JSON's # functionality in pure ruby. module Pure require 'json/pure/parser' require 'json/pure/generator' $DEBUG and warn "Using Pure library for JSON." 
JSON.parser = Parser JSON.generator = Generator end JSON_LOADED = true unless defined?(::JSON::JSON_LOADED) end ruby-json-2.1.0+dfsg.orig/CHANGES.md0000644000175000017500000004224413113111601016262 0ustar boutilboutil# Changes ## 2017-04-18 (2.1.0) * Allow passing of `decimal_class` option to specify a class as which to parse JSON float numbers. ## 2017-03-23 (2.0.4) * Raise exception for incomplete unicode surrogates/character escape sequences. This problem was reported by Daniel Gollahon (dgollahon). * Fix arbitrary heap exposure problem. This problem was reported by Ahmad Sherif (ahmadsherif). ## 2017-01-12 (2.0.3) * Set `required_ruby_version` to 1.9 * Some small fixes ## 2016-07-26 (2.0.2) * Specify `required_ruby_version` for json\_pure. * Fix issue #295 failure when parsing frozen strings. ## 2016-07-01 (2.0.1) * Fix problem when requiring json\_pure and Parser constant was defined top level. * Add `RB_GC_GUARD` to avoid possible GC problem via Pete Johns. * Store `current_nesting` on stack by Aaron Patterson. ## 2015-09-11 (2.0.0) * Now complies to newest JSON RFC 7159. * Implements compatibiliy to ruby 2.4 integer unification. * Drops support for old rubies whose life has ended, that is rubies < 2.0. Also see https://www.ruby-lang.org/en/news/2014/07/01/eol-for-1-8-7-and-1-9-2/ * There were still some mentions of dual GPL licensing in the source, but JSON has just the Ruby license that itself includes an explicit dual-licensing clause that allows covered software to be distributed under the terms of the Simplified BSD License instead for all ruby versions >= 1.9.3. This is however a GPL compatible license according to the Free Software Foundation. I changed these mentions to be consistent with the Ruby license setting in the gemspec files which were already correct now. ## 2015-06-01 (1.8.3) * Fix potential memory leak, thx to nobu. ## 2015-01-08 (1.8.2) * Some performance improvements by Vipul A M . * Fix by Jason R. 
Clark to avoid mutation of `JSON.dump_default_options`. * More tests by Michael Mac-Vicar and fixing `space_before` accessor in generator. * Performance on Jruby improved by Ben Browning . * Some fixes to be compatible with the new Ruby 2.2 by Zachary Scott and SHIBATA Hiroshi . ## 2013-05-13 (1.8.1) * Remove Rubinius exception since transcoding should be working now. ## 2013-05-13 (1.8.0) * Fix https://github.com/flori/json/issues/162 reported by Marc-Andre Lafortune . Thanks! * Applied patches by Yui NARUSE to suppress warning with -Wchar-subscripts and better validate UTF-8 strings. * Applied patch by ginriki@github to remove unnecessary if. * Add load/dump interface to `JSON::GenericObject` to make serialize :some_attribute, `JSON::GenericObject` work in Rails active models for convenient `SomeModel#some_attribute.foo.bar` access to serialised JSON data. ## 2013-02-04 (1.7.7) * Security fix for JSON create_additions default value and `JSON::GenericObject`. It should not be possible to create additions unless explicitely requested by setting the create_additions argument to true or using the JSON.load/dump interface. If `JSON::GenericObject` is supposed to be automatically deserialised, this has to be explicitely enabled by setting JSON::GenericObject.json_creatable = true as well. * Remove useless assert in fbuffer implementation. * Apply patch attached to https://github.com/flori/json/issues#issue/155 provided by John Shahid , Thx! * Add license information to rubygems spec data, reported by Jordi Massaguer Pla . * Improve documentation, thx to Zachary Scott . ## 2012-11-29 (1.7.6) * Add `GeneratorState#merge` alias for JRuby, fix state accessor methods. Thx to jvshahid@github. * Increase hash likeness of state objects. ## 2012-08-17 (1.7.5) * Fix compilation of extension on older rubies. 
## 2012-07-26 (1.7.4) * Fix compilation problem on AIX, see https://github.com/flori/json/issues/142 ## 2012-05-12 (1.7.3) * Work around Rubinius encoding issues using iconv for conversion instead. ## 2012-05-11 (1.7.2) * Fix some encoding issues, that cause problems for the pure and the extension variant in jruby 1.9 mode. ## 2012-04-28 (1.7.1) * Some small fixes for building ## 2012-04-28 (1.7.0) * Add `JSON::GenericObject` for method access to objects transmitted via JSON. ## 2012-04-27 (1.6.7) * Fix possible crash when trying to parse nil value. ## 2012-02-11 (1.6.6) * Propagate src encoding to values made from it (fixes 1.9 mode converting everything to ascii-8bit; harmless for 1.8 mode too) (Thomas E. Enebo ), should fix https://github.com/flori/json/issues#issue/119. * Fix https://github.com/flori/json/issues#issue/124 Thx to Jason Hutchens. * Fix https://github.com/flori/json/issues#issue/117 ## 2012-01-15 (1.6.5) * Vit Ondruch reported a bug that shows up when using optimisation under GCC 4.7. Thx to him, Bohuslav Kabrda and Yui NARUSE for debugging and developing a patch fix. ## 2011-12-24 (1.6.4) * Patches that improve speed on JRuby contributed by Charles Oliver Nutter . * Support `object_class`/`array_class` with duck typed hash/array. ## 2011-12-01 (1.6.3) * Let `JSON.load('')` return nil as well to make mysql text columns (default to `''`) work better for serialization. ## 2011-11-21 (1.6.2) * Add support for OpenStruct and BigDecimal. * Fix bug when parsing nil in `quirks_mode`. * Make JSON.dump and JSON.load methods better cooperate with Rails' serialize method. Just use: serialize :value, JSON * Fix bug with time serialization concerning nanoseconds. Thanks for the patch go to Josh Partlow (jpartlow@github). * Improve parsing speed for JSON numbers (integers and floats) in a similar way to what Evan Phoenix suggested in: https://github.com/flori/json/pull/103 ## 2011-09-18 (1.6.1) * Using -target 1.5 to force Java bits to compile with 1.5. 
## 2011-09-12 (1.6.0) * Extract utilities (prettifier and GUI-editor) in its own gem json-utils. * Split json/add/core into different files for classes to be serialised. ## 2011-08-31 (1.5.4) * Fix memory leak when used from multiple JRuby. (Patch by jfirebaugh@github). * Apply patch by Eric Wong that fixes garbage collection problem reported in https://github.com/flori/json/issues/46. * Add :quirks_mode option to parser and generator. * Add support for Rational and Complex number additions via json/add/complex and json/add/rational requires. ## 2011-06-20 (1.5.3) * Alias State#configure method as State#merge to increase duck type synonymy with Hash. * Add `as_json` methods in json/add/core, so rails can create its json objects the new way. ## 2011-05-11 (1.5.2) * Apply documentation patch by Cory Monty . * Add gemspecs for json and json\_pure. * Fix bug in jruby pretty printing. * Fix bug in `object_class` and `array_class` when inheriting from Hash or Array. ## 2011-01-24 (1.5.1) * Made rake-compiler build a fat binary gem. This should fix issue https://github.com/flori/json/issues#issue/54. ## 2011-01-22 (1.5.0) * Included Java source codes for the Jruby extension made by Daniel Luz . * Output full exception message of `deep_const_get` to aid debugging. * Fixed an issue with ruby 1.9 `Module#const_defined?` method, that was reported by Riley Goodside. ## 2010-08-09 (1.4.6) * Fixed oversight reported in http://github.com/flori/json/issues/closed#issue/23, always create a new object from the state prototype. * Made pure and ext api more similar again. ## 2010-08-07 (1.4.5) * Manage data structure nesting depth in state object during generation. This should reduce problems with `to_json` method definіtions that only have one argument. * Some fixes in the state objects and additional tests. 
## 2010-08-06 (1.4.4) * Fixes build problem for rubinius under OS X, http://github.com/flori/json/issues/closed#issue/25 * Fixes crashes described in http://github.com/flori/json/issues/closed#issue/21 and http://github.com/flori/json/issues/closed#issue/23 ## 2010-05-05 (1.4.3) * Fixed some test assertions, from Ruby r27587 and r27590, patch by nobu. * Fixed issue http://github.com/flori/json/issues/#issue/20 reported by electronicwhisper@github. Thx! ## 2010-04-26 (1.4.2) * Applied patch from naruse Yui NARUSE to make building with Microsoft Visual C possible again. * Applied patch from devrandom in order to allow building of json_pure if extensiontask is not present. * Thanks to Dustin Schneider , who reported a memory leak, which is fixed in this release. * Applied 993f261ccb8f911d2ae57e9db48ec7acd0187283 patch from josh@github. ## 2010-04-25 (1.4.1) * Fix for a bug reported by Dan DeLeo , caused by T_FIXNUM being different on 32bit/64bit architectures. ## 2010-04-23 (1.4.0) * Major speed improvements and building with simplified directory/file-structure. * Extension should at least be comapatible with MRI, YARV and Rubinius. ## 2010-04-07 (1.2.4) * Triger const_missing callback to make Rails' dynamic class loading work. ## 2010-03-11 (1.2.3) * Added a `State#[]` method which returns an attribute's value in order to increase duck type compatibility to Hash. ## 2010-02-27 (1.2.2) * Made some changes to make the building of the parser/generator compatible to Rubinius. ## 2009-11-25 (1.2.1) * Added `:symbolize_names` option to Parser, which returns symbols instead of strings in object names/keys. ## 2009-10-01 (1.2.0) * `fast_generate` now raises an exeception for nan and infinite floats. * On Ruby 1.8 json supports parsing of UTF-8, UTF-16BE, UTF-16LE, UTF-32BE, and UTF-32LE JSON documents now. Under Ruby 1.9 the M17n conversion functions are used to convert from all supported encodings. ASCII-8BIT encoded strings are handled like all strings under Ruby 1.8 were. 
* Better documentation ## 2009-08-23 (1.1.9) * Added forgotten main doc file `extra_rdoc_files`. ## 2009-08-23 (1.1.8) * Applied a patch by OZAWA Sakuro to make json/pure work in environments that don't provide iconv. * Applied patch by okkez_ in order to fix Ruby Bug #1768: http://redmine.ruby-lang.org/issues/show/1768. * Finally got around to avoid the rather paranoid escaping of ?/ characters in the generator's output. The parsers aren't affected by this change. Thanks to Rich Apodaca for the suggestion. ## 2009-06-29 (1.1.7) * Security Fix for JSON::Pure::Parser. A specially designed string could cause catastrophic backtracking in one of the parser's regular expressions in earlier 1.1.x versions. JSON::Ext::Parser isn't affected by this issue. Thanks to Bartosz Blimke for reporting this problem. * This release also uses a less strict ruby version requirement for the creation of the mswin32 native gem. ## 2009-05-10 (1.1.6) * No changes. І tested native linux gems in the last release and they don't play well with different ruby versions other than the one the gem was built with. This release is just to bump the version number in order to skip the native gem on rubyforge. ## 2009-05-10 (1.1.5) * Started to build gems with rake-compiler gem. * Applied patch object/array class patch from Brian Candler and fixes. ## 2009-04-01 (1.1.4) * Fixed a bug in the creation of serialized generic rails objects reported by Friedrich Graeter . * Deleted tests/runner.rb, we're using testrb instead. * Editor supports Infinity in numbers now. * Made some changes in order to get the library to compile/run under Ruby 1.9. * Improved speed of the code path for the fast_generate method in the pure variant. ## 2008-07-10 (1.1.3) * Wesley Beary reported a bug in json/add/core's DateTime handling: If the nominator and denominator of the offset were divisible by each other Ruby's Rational#to_s returns them as an integer not a fraction with '/'. 
This caused a ZeroDivisionError during parsing. * Use Date#start and DateTime#start instead of sg method, while remaining backwards compatible. * Supports ragel >= 6.0 now. * Corrected some tests. * Some minor changes. ## 2007-11-27 (1.1.2) * Remember default dir (last used directory) in editor. * JSON::Editor.edit method added, the editor can now receive json texts from the clipboard via C-v. * Load json texts from an URL pasted via middle button press. * Added :create_additions option to Parser. This makes it possible to disable the creation of additions by force, in order to treat json texts as data while having additions loaded. * Jacob Maine reported, that JSON(:foo) outputs a JSON object if the rails addition is enabled, which is wrong. It now outputs a JSON string "foo" instead, like suggested by Jacob Maine. * Discovered a bug in the Ruby Bugs Tracker on rubyforge, that was reported by John Evans lgastako@gmail.com. He could produce a crash in the JSON generator by returning something other than a String instance from a to_json method. I now guard against this by doing a rather crude type check, which raises an exception instead of crashing. ## 2007-07-06 (1.1.1) * Yui NARUSE sent some patches to fix tests for Ruby 1.9. I applied them and adapted some of them a bit to run both on 1.8 and 1.9. * Introduced a `JSON.parse!` method without depth checking for people who like danger. * Made generate and `pretty_generate` methods configurable by an options hash. * Added :allow_nan option to parser and generator in order to handle NaN, Infinity, and -Infinity correctly - if requested. Floats, which aren't numbers, aren't valid JSON according to RFC4627, so by default an exception will be raised if any of these symbols are encountered. Thanks to Andrea Censi for his hint about this. * Fixed some more tests for Ruby 1.9. * Implemented dump/load interface of Marshal as suggested in ruby-core:11405 by murphy . 
* Implemented the `max_nesting` feature for generate methods, too. * Added some implementations for ruby core's custom objects for serialisation/deserialisation purposes. ## 2007-05-21 (1.1.0) * Implemented max_nesting feature for parser to avoid stack overflows for data from untrusted sources. If you trust the source, you can disable it with the option max_nesting => false. * Piers Cawley reported a bug, that not every character can be escaped by `\` as required by RFC4627. There's a contradiction between David Crockford's JSON checker test vectors (in tests/fixtures) and RFC4627, though. I decided to stick to the RFC, because the JSON checker seems to be a bit older than the RFC. * Extended license to Ruby License, which includes the GPL. * Added keyboard shortcuts, and 'Open location' menu item to edit_json.rb. ## 2007-05-09 (1.0.4) * Applied a patch from Yui NARUSE to make JSON compile under Ruby 1.9. Thank you very much for mailing it to me! * Made binary variants of JSON fail early, instead of falling back to the pure version. This should avoid overshadowing of eventual problems while loading of the binary. ## 2007-03-24 (1.0.3) * Improved performance of pure variant a bit. * The ext variant of this release supports the mswin32 platform. Ugh! ## 2007-03-24 (1.0.2) * Ext Parser didn't parse 0e0 correctly into 0.0: Fixed! ## 2007-03-24 (1.0.1) * Forgot some object files in the build dir. I really like that - not! ## 2007-03-24 (1.0.0) * Added C implementations for the JSON generator and a ragel based JSON parser in C. * Much more tests, especially fixtures from json.org. * Further improved conformance to RFC4627. ## 2007-02-09 (0.4.3) * Conform more to RFC4627 for JSON: This means JSON strings now always must contain exactly one object `"{ ... }"` or array `"[ ... ]"` in order to be parsed without raising an exception. 
The definition of what constitutes a whitespace is narrower in JSON than in Ruby ([ \t\r\n]), and there are differences in floats and integers (no octals or hexadecimals) as well. * Added aliases generate and `pretty_generate` of unparse and `pretty_unparse`. * Fixed a test case. * Catch an `Iconv::InvalidEncoding` exception, that seems to occur on some Sun boxes with SunOS 5.8, if iconv doesn't support utf16 conversions. This was reported by Andrew R Jackson, thanks a bunch! ## 2006-08-25 (0.4.2) * Fixed a bug in handling solidi (/-characters), that was reported by Kevin Gilpin. ## 2006-02-06 (0.4.1) * Fixed a bug related to escaping with backslashes. Thanks for the report go to Florian Munz. ## 2005-09-23 (0.4.0) * Initial Rubyforge Version ruby-json-2.1.0+dfsg.orig/java/0000755000175000017500000000000013113111601015603 5ustar boutilboutilruby-json-2.1.0+dfsg.orig/java/src/0000755000175000017500000000000013113111601016372 5ustar boutilboutilruby-json-2.1.0+dfsg.orig/java/src/json/0000755000175000017500000000000013113111601017343 5ustar boutilboutilruby-json-2.1.0+dfsg.orig/java/src/json/ext/0000755000175000017500000000000013113111601020143 5ustar boutilboutilruby-json-2.1.0+dfsg.orig/java/src/json/ext/Parser.java0000644000175000017500000017135613113111601022257 0ustar boutilboutil // line 1 "Parser.rl" /* * This code is copyrighted work by Daniel Luz . 
* * Distributed under the Ruby license: https://www.ruby-lang.org/en/about/license.txt */ package json.ext; import org.jruby.Ruby; import org.jruby.RubyArray; import org.jruby.RubyClass; import org.jruby.RubyEncoding; import org.jruby.RubyFloat; import org.jruby.RubyHash; import org.jruby.RubyInteger; import org.jruby.RubyModule; import org.jruby.RubyNumeric; import org.jruby.RubyObject; import org.jruby.RubyString; import org.jruby.anno.JRubyMethod; import org.jruby.exceptions.JumpException; import org.jruby.exceptions.RaiseException; import org.jruby.runtime.Block; import org.jruby.runtime.ObjectAllocator; import org.jruby.runtime.ThreadContext; import org.jruby.runtime.Visibility; import org.jruby.runtime.builtin.IRubyObject; import org.jruby.util.ByteList; import org.jruby.util.ConvertBytes; import static org.jruby.util.ConvertDouble.DoubleConverter; /** * The JSON::Ext::Parser class. * *

This is the JSON parser implemented as a Java class. To use it as the * standard parser, set *

JSON.parser = JSON::Ext::Parser
* This is performed for you when you include "json/ext". * *

This class does not perform the actual parsing, just acts as an interface
 * to Ruby code. When the {@link #parse()} method is invoked, a
 * Parser.ParserSession object is instantiated, which handles the process.
 *
 * @author mernen
 */
public class Parser extends RubyObject {
    // Per-runtime cached metadata (encodings, JSON module); set in the constructor.
    private final RuntimeInfo info;
    // Configuration below is captured from the options Hash in initialize().
    private RubyString vSource;      // source text; doubles as the "already initialized" flag (null until then)
    private RubyString createId;     // key that marks a JSON object for create_additions (typically "json_class")
    private boolean createAdditions; // whether matching classes may be asked to json_create the value
    private int maxNesting;          // maximum container nesting depth; 0 disables the check
    private boolean allowNaN;        // accept NaN/Infinity/-Infinity (extension beyond RFC 4627)
    private boolean symbolizeNames;  // return object keys as Symbols instead of Strings
    private RubyClass objectClass;   // class instantiated for JSON objects (default: Hash)
    private RubyClass arrayClass;    // class instantiated for JSON arrays (default: Array)
    private RubyClass decimalClass;  // optional replacement for Float when parsing decimal numbers
    private RubyHash matchString;    // pattern => class map used for string-triggered additions

    private static final int DEFAULT_MAX_NESTING = 100;

    // Raw byte sequence compared against the input when "-Infinity" is a candidate.
    private static final ByteList JSON_MINUS_INFINITY = new ByteList(ByteList.plain("-Infinity"));
    // constant names in the JSON module containing those values
    private static final String CONST_NAN = "NaN";
    private static final String CONST_INFINITY = "Infinity";
    private static final String CONST_MINUS_INFINITY = "MinusInfinity";

    // Allocator registered with the runtime so Parser.new can build instances.
    static final ObjectAllocator ALLOCATOR = new ObjectAllocator() {
        public IRubyObject allocate(Ruby runtime, RubyClass klazz) {
            return new Parser(runtime, klazz);
        }
    };

    /**
     * Multiple-value return for internal parser methods.
     *
     *

All the parseStuff methods return instances of
 * ParserResult when successful, or null when
 * there's a problem with the input data.
 */
static final class ParserResult {
    /**
     * The result of the successful parsing. Should never be
     * null.
     */
    IRubyObject result;
    /**
     * The point where the parser returned.
     */
    int p;

    // Mutable holder: the same instance is reused across recursive parse
    // calls instead of allocating a fresh result pair for every value.
    void update(IRubyObject result, int p) {
        this.result = result;
        this.p = p;
    }
}

// Binds this parser instance to its runtime and caches the per-runtime
// info (encodings, JSON module) needed later during parsing.
public Parser(Ruby runtime, RubyClass metaClass) {
    super(runtime, metaClass);
    info = RuntimeInfo.forRuntime(runtime);
}

/**
 * Parser.new(source, opts = {})
 *
 *

Creates a new JSON::Ext::Parser instance for the string * source. * It will be configured by the opts Hash. * opts can have the following keys: * *

*
:max_nesting *
The maximum depth of nesting allowed in the parsed data * structures. Disable depth checking with :max_nesting => false|nil|0, * it defaults to 100. * *
:allow_nan *
If set to true, allow NaN, * Infinity and -Infinity in defiance of RFC 4627 * to be parsed by the Parser. This option defaults to false. * *
:symbolize_names *
If set to true, returns symbols for the names (keys) in * a JSON object. Otherwise strings are returned, which is also the default. * *
:create_additions *
If set to false, the Parser doesn't create additions * even if a matching class and create_id was found. This option * defaults to true. * *
:object_class *
Defaults to Hash. * *
:array_class *
Defaults to Array. * *
:decimal_class *
Specifies which class to use instead of the default (Float) when * parsing decimal numbers. This class must accept a single string argument * in its constructor. *
*/
// Ruby-visible factory: allocates a Parser and forwards to #initialize.
@JRubyMethod(name = "new", required = 1, optional = 1, meta = true)
public static IRubyObject newInstance(IRubyObject clazz, IRubyObject[] args, Block block) {
    Parser parser = (Parser)((RubyClass)clazz).allocate();
    parser.callInit(args, block);
    return parser;
}

// Reads the options Hash (args[1], optional) and stores the source text.
// Raises TypeError on double initialization and ArgumentError on the
// incompatible symbolize_names + create_additions combination.
@JRubyMethod(required = 1, optional = 1, visibility = Visibility.PRIVATE)
public IRubyObject initialize(ThreadContext context, IRubyObject[] args) {
    Ruby runtime = context.getRuntime();
    // vSource is assigned only at the end of a successful initialize, so a
    // non-null value means this instance was already set up.
    if (this.vSource != null) {
        throw runtime.newTypeError("already initialized instance");
    }

    OptionsReader opts = new OptionsReader(context, args.length > 1 ? args[1] : null);
    this.maxNesting      = opts.getInt("max_nesting", DEFAULT_MAX_NESTING);
    this.allowNaN        = opts.getBool("allow_nan", false);
    this.symbolizeNames  = opts.getBool("symbolize_names", false);
    this.createId        = opts.getString("create_id", getCreateId(context));
    this.createAdditions = opts.getBool("create_additions", false);
    this.objectClass     = opts.getClass("object_class", runtime.getHash());
    this.arrayClass      = opts.getClass("array_class", runtime.getArray());
    this.decimalClass    = opts.getClass("decimal_class", null);
    this.matchString     = opts.getHash("match_string");

    if (symbolizeNames && createAdditions) {
        // BUGFIX: the second literal previously began with a space, which
        // produced "cannot be  used" (double space) in the message.
        throw runtime.newArgumentError(
            "options :symbolize_names and :create_additions cannot be " +
            "used in conjunction"
        );
    }

    this.vSource = args[0].convertToString();
    this.vSource = convertEncoding(context, vSource);

    return this;
}

/**
 * Checks the given string's encoding. If a non-UTF-8 encoding is detected,
 * a converted copy is returned.
 * Returns the source string if no conversion is needed.
*/
private RubyString convertEncoding(ThreadContext context, RubyString source) {
    RubyEncoding encoding = (RubyEncoding)source.encoding(context);
    if (encoding == info.ascii8bit.get()) {
        // ASCII-8BIT (binary) input is relabeled as UTF-8 rather than
        // transcoded. force_encoding mutates the receiver, so a frozen
        // source must be duplicated first.
        if (source.isFrozen()) {
            source = (RubyString) source.dup();
        }
        source.force_encoding(context, info.utf8.get());
    } else {
        // Every other encoding is transcoded to UTF-8 via String#encode.
        source = (RubyString) source.encode(context, info.utf8.get());
    }
    return source;
}

/**
 * Checks the first four bytes of the given ByteList to infer its encoding,
 * using the principle demonstrated on section 3 of RFC 4627 (JSON).
 */
private static String sniffByteList(ByteList bl) {
    if (bl.length() < 4) return null;
    // Zero bytes in the first four positions reveal the unit width and
    // endianness, per the RFC 4627 section 3 BOM-less detection table.
    if (bl.get(0) == 0 && bl.get(2) == 0) {
        return bl.get(1) == 0 ? "utf-32be" : "utf-16be";
    }
    if (bl.get(1) == 0 && bl.get(3) == 0) {
        return bl.get(2) == 0 ? "utf-32le" : "utf-16le";
    }
    return null;
}

/**
 * Assumes the given (binary) RubyString to be in the given encoding, then
 * converts it to UTF-8.
 */
private RubyString reinterpretEncoding(ThreadContext context,
        RubyString str, String sniffedEncoding) {
    RubyEncoding actualEncoding = info.getEncoding(context, sniffedEncoding);
    RubyEncoding targetEncoding = info.utf8.get();
    // Operate on a copy: both force_encoding and encode_bang mutate the
    // receiver, and the caller's string must stay untouched.
    RubyString dup = (RubyString)str.dup();
    dup.force_encoding(context, actualEncoding);
    return (RubyString)dup.encode_bang(context, targetEncoding);
}

/**
 * Parser#parse()
 *
 *

Parses the current JSON text source and returns the
 * complete data structure as a result.
 */
@JRubyMethod
public IRubyObject parse(ThreadContext context) {
    // Each call gets its own session; the session owns all mutable
    // parsing state, so the Parser itself stays reusable.
    ParserSession session = new ParserSession(this, context, info);
    return session.parse();
}

/**
 * Parser#source()
 *
 *

Returns a copy of the current source string, that was
 * used to construct this Parser.
 */
@JRubyMethod(name = "source")
public IRubyObject source_get() {
    // Hand out a duplicate so callers cannot mutate our source.
    return checkAndGetSource().dup();
}

public RubyString checkAndGetSource() {
    // vSource stays null until initialize() has completed successfully.
    if (vSource == null) {
        throw getRuntime().newTypeError("uninitialized instance");
    }
    return vSource;
}

/**
 * Queries JSON.create_id. Returns null if it is
 * set to nil or false, and a String if not.
 */
private RubyString getCreateId(ThreadContext context) {
    IRubyObject v = info.jsonModule.get().callMethod(context, "create_id");
    if (!v.isTrue()) {
        return null;
    }
    return v.convertToString();
}

/**
 * A string parsing session.
 *
 *

Once a ParserSession is instantiated, the source string should not * change until the parsing is complete. The ParserSession object assumes * the source {@link RubyString} is still associated to its original * {@link ByteList}, which in turn must still be bound to the same * byte[] value (and on the same offset). */ // Ragel uses lots of fall-through @SuppressWarnings("fallthrough") private static class ParserSession { private final Parser parser; private final ThreadContext context; private final RuntimeInfo info; private final ByteList byteList; private final ByteList view; private final byte[] data; private final StringDecoder decoder; private int currentNesting = 0; private final DoubleConverter dc; // initialization value for all state variables. // no idea about the origins of this value, ask Flori ;) private static final int EVIL = 0x666; private ParserSession(Parser parser, ThreadContext context, RuntimeInfo info) { this.parser = parser; this.context = context; this.info = info; this.byteList = parser.checkAndGetSource().getByteList(); this.data = byteList.unsafeBytes(); this.view = new ByteList(data, false); this.decoder = new StringDecoder(context); this.dc = new DoubleConverter(); } private RaiseException unexpectedToken(int absStart, int absEnd) { RubyString msg = getRuntime().newString("unexpected token at '") .cat(data, absStart, absEnd - absStart) .cat((byte)'\''); return newException(Utils.M_PARSER_ERROR, msg); } private Ruby getRuntime() { return context.getRuntime(); } // line 339 "Parser.rl" // line 321 "Parser.java" private static byte[] init__JSON_value_actions_0() { return new byte [] { 0, 1, 0, 1, 1, 1, 2, 1, 3, 1, 4, 1, 5, 1, 6, 1, 7, 1, 8, 1, 9 }; } private static final byte _JSON_value_actions[] = init__JSON_value_actions_0(); private static byte[] init__JSON_value_key_offsets_0() { return new byte [] { 0, 0, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30 }; } private static final byte 
_JSON_value_key_offsets[] = init__JSON_value_key_offsets_0(); private static char[] init__JSON_value_trans_keys_0() { return new char [] { 34, 45, 73, 78, 91, 102, 110, 116, 123, 48, 57, 110, 102, 105, 110, 105, 116, 121, 97, 78, 97, 108, 115, 101, 117, 108, 108, 114, 117, 101, 0 }; } private static final char _JSON_value_trans_keys[] = init__JSON_value_trans_keys_0(); private static byte[] init__JSON_value_single_lengths_0() { return new byte [] { 0, 9, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0 }; } private static final byte _JSON_value_single_lengths[] = init__JSON_value_single_lengths_0(); private static byte[] init__JSON_value_range_lengths_0() { return new byte [] { 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }; } private static final byte _JSON_value_range_lengths[] = init__JSON_value_range_lengths_0(); private static byte[] init__JSON_value_index_offsets_0() { return new byte [] { 0, 0, 11, 13, 15, 17, 19, 21, 23, 25, 27, 29, 31, 33, 35, 37, 39, 41, 43, 45, 47, 49 }; } private static final byte _JSON_value_index_offsets[] = init__JSON_value_index_offsets_0(); private static byte[] init__JSON_value_trans_targs_0() { return new byte [] { 21, 21, 2, 9, 21, 11, 15, 18, 21, 21, 0, 3, 0, 4, 0, 5, 0, 6, 0, 7, 0, 8, 0, 21, 0, 10, 0, 21, 0, 12, 0, 13, 0, 14, 0, 21, 0, 16, 0, 17, 0, 21, 0, 19, 0, 20, 0, 21, 0, 0, 0 }; } private static final byte _JSON_value_trans_targs[] = init__JSON_value_trans_targs_0(); private static byte[] init__JSON_value_trans_actions_0() { return new byte [] { 13, 11, 0, 0, 15, 0, 0, 0, 17, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, 0, 0, 0, 7, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 5, 0, 0, 0 }; } private static final byte _JSON_value_trans_actions[] = init__JSON_value_trans_actions_0(); private static byte[] init__JSON_value_from_state_actions_0() { return new byte [] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19 }; } private static final byte 
_JSON_value_from_state_actions[] = init__JSON_value_from_state_actions_0(); static final int JSON_value_start = 1; static final int JSON_value_first_final = 21; static final int JSON_value_error = 0; static final int JSON_value_en_main = 1; // line 445 "Parser.rl" void parseValue(ParserResult res, int p, int pe) { int cs = EVIL; IRubyObject result = null; // line 443 "Parser.java" { cs = JSON_value_start; } // line 452 "Parser.rl" // line 450 "Parser.java" { int _klen; int _trans = 0; int _acts; int _nacts; int _keys; int _goto_targ = 0; _goto: while (true) { switch ( _goto_targ ) { case 0: if ( p == pe ) { _goto_targ = 4; continue _goto; } if ( cs == 0 ) { _goto_targ = 5; continue _goto; } case 1: _acts = _JSON_value_from_state_actions[cs]; _nacts = (int) _JSON_value_actions[_acts++]; while ( _nacts-- > 0 ) { switch ( _JSON_value_actions[_acts++] ) { case 9: // line 430 "Parser.rl" { p--; { p += 1; _goto_targ = 5; if (true) continue _goto;} } break; // line 482 "Parser.java" } } _match: do { _keys = _JSON_value_key_offsets[cs]; _trans = _JSON_value_index_offsets[cs]; _klen = _JSON_value_single_lengths[cs]; if ( _klen > 0 ) { int _lower = _keys; int _mid; int _upper = _keys + _klen - 1; while (true) { if ( _upper < _lower ) break; _mid = _lower + ((_upper-_lower) >> 1); if ( data[p] < _JSON_value_trans_keys[_mid] ) _upper = _mid - 1; else if ( data[p] > _JSON_value_trans_keys[_mid] ) _lower = _mid + 1; else { _trans += (_mid - _keys); break _match; } } _keys += _klen; _trans += _klen; } _klen = _JSON_value_range_lengths[cs]; if ( _klen > 0 ) { int _lower = _keys; int _mid; int _upper = _keys + (_klen<<1) - 2; while (true) { if ( _upper < _lower ) break; _mid = _lower + (((_upper-_lower) >> 1) & ~1); if ( data[p] < _JSON_value_trans_keys[_mid] ) _upper = _mid - 2; else if ( data[p] > _JSON_value_trans_keys[_mid+1] ) _lower = _mid + 2; else { _trans += ((_mid - _keys)>>1); break _match; } } _trans += _klen; } } while (false); cs = _JSON_value_trans_targs[_trans]; if 
( _JSON_value_trans_actions[_trans] != 0 ) { _acts = _JSON_value_trans_actions[_trans]; _nacts = (int) _JSON_value_actions[_acts++]; while ( _nacts-- > 0 ) { switch ( _JSON_value_actions[_acts++] ) { case 0: // line 347 "Parser.rl" { result = getRuntime().getNil(); } break; case 1: // line 350 "Parser.rl" { result = getRuntime().getFalse(); } break; case 2: // line 353 "Parser.rl" { result = getRuntime().getTrue(); } break; case 3: // line 356 "Parser.rl" { if (parser.allowNaN) { result = getConstant(CONST_NAN); } else { throw unexpectedToken(p - 2, pe); } } break; case 4: // line 363 "Parser.rl" { if (parser.allowNaN) { result = getConstant(CONST_INFINITY); } else { throw unexpectedToken(p - 7, pe); } } break; case 5: // line 370 "Parser.rl" { if (pe > p + 8 && absSubSequence(p, p + 9).equals(JSON_MINUS_INFINITY)) { if (parser.allowNaN) { result = getConstant(CONST_MINUS_INFINITY); {p = (( p + 10))-1;} p--; { p += 1; _goto_targ = 5; if (true) continue _goto;} } else { throw unexpectedToken(p, pe); } } parseFloat(res, p, pe); if (res.result != null) { result = res.result; {p = (( res.p))-1;} } parseInteger(res, p, pe); if (res.result != null) { result = res.result; {p = (( res.p))-1;} } p--; { p += 1; _goto_targ = 5; if (true) continue _goto;} } break; case 6: // line 396 "Parser.rl" { parseString(res, p, pe); if (res.result == null) { p--; { p += 1; _goto_targ = 5; if (true) continue _goto;} } else { result = res.result; {p = (( res.p))-1;} } } break; case 7: // line 406 "Parser.rl" { currentNesting++; parseArray(res, p, pe); currentNesting--; if (res.result == null) { p--; { p += 1; _goto_targ = 5; if (true) continue _goto;} } else { result = res.result; {p = (( res.p))-1;} } } break; case 8: // line 418 "Parser.rl" { currentNesting++; parseObject(res, p, pe); currentNesting--; if (res.result == null) { p--; { p += 1; _goto_targ = 5; if (true) continue _goto;} } else { result = res.result; {p = (( res.p))-1;} } } break; // line 654 "Parser.java" } } } case 2: if 
( cs == 0 ) { _goto_targ = 5; continue _goto; } if ( ++p != pe ) { _goto_targ = 1; continue _goto; } case 4: case 5: } break; } } // line 453 "Parser.rl" if (cs >= JSON_value_first_final && result != null) { res.update(result, p); } else { res.update(null, p); } } // line 684 "Parser.java" private static byte[] init__JSON_integer_actions_0() { return new byte [] { 0, 1, 0 }; } private static final byte _JSON_integer_actions[] = init__JSON_integer_actions_0(); private static byte[] init__JSON_integer_key_offsets_0() { return new byte [] { 0, 0, 4, 7, 9, 9 }; } private static final byte _JSON_integer_key_offsets[] = init__JSON_integer_key_offsets_0(); private static char[] init__JSON_integer_trans_keys_0() { return new char [] { 45, 48, 49, 57, 48, 49, 57, 48, 57, 48, 57, 0 }; } private static final char _JSON_integer_trans_keys[] = init__JSON_integer_trans_keys_0(); private static byte[] init__JSON_integer_single_lengths_0() { return new byte [] { 0, 2, 1, 0, 0, 0 }; } private static final byte _JSON_integer_single_lengths[] = init__JSON_integer_single_lengths_0(); private static byte[] init__JSON_integer_range_lengths_0() { return new byte [] { 0, 1, 1, 1, 0, 1 }; } private static final byte _JSON_integer_range_lengths[] = init__JSON_integer_range_lengths_0(); private static byte[] init__JSON_integer_index_offsets_0() { return new byte [] { 0, 0, 4, 7, 9, 10 }; } private static final byte _JSON_integer_index_offsets[] = init__JSON_integer_index_offsets_0(); private static byte[] init__JSON_integer_indicies_0() { return new byte [] { 0, 2, 3, 1, 2, 3, 1, 1, 4, 1, 3, 4, 0 }; } private static final byte _JSON_integer_indicies[] = init__JSON_integer_indicies_0(); private static byte[] init__JSON_integer_trans_targs_0() { return new byte [] { 2, 0, 3, 5, 4 }; } private static final byte _JSON_integer_trans_targs[] = init__JSON_integer_trans_targs_0(); private static byte[] init__JSON_integer_trans_actions_0() { return new byte [] { 0, 0, 0, 0, 1 }; } private static 
final byte _JSON_integer_trans_actions[] = init__JSON_integer_trans_actions_0(); static final int JSON_integer_start = 1; static final int JSON_integer_first_final = 3; static final int JSON_integer_error = 0; static final int JSON_integer_en_main = 1; // line 472 "Parser.rl" void parseInteger(ParserResult res, int p, int pe) { int new_p = parseIntegerInternal(p, pe); if (new_p == -1) { res.update(null, p); return; } RubyInteger number = createInteger(p, new_p); res.update(number, new_p + 1); return; } int parseIntegerInternal(int p, int pe) { int cs = EVIL; // line 801 "Parser.java" { cs = JSON_integer_start; } // line 489 "Parser.rl" int memo = p; // line 809 "Parser.java" { int _klen; int _trans = 0; int _acts; int _nacts; int _keys; int _goto_targ = 0; _goto: while (true) { switch ( _goto_targ ) { case 0: if ( p == pe ) { _goto_targ = 4; continue _goto; } if ( cs == 0 ) { _goto_targ = 5; continue _goto; } case 1: _match: do { _keys = _JSON_integer_key_offsets[cs]; _trans = _JSON_integer_index_offsets[cs]; _klen = _JSON_integer_single_lengths[cs]; if ( _klen > 0 ) { int _lower = _keys; int _mid; int _upper = _keys + _klen - 1; while (true) { if ( _upper < _lower ) break; _mid = _lower + ((_upper-_lower) >> 1); if ( data[p] < _JSON_integer_trans_keys[_mid] ) _upper = _mid - 1; else if ( data[p] > _JSON_integer_trans_keys[_mid] ) _lower = _mid + 1; else { _trans += (_mid - _keys); break _match; } } _keys += _klen; _trans += _klen; } _klen = _JSON_integer_range_lengths[cs]; if ( _klen > 0 ) { int _lower = _keys; int _mid; int _upper = _keys + (_klen<<1) - 2; while (true) { if ( _upper < _lower ) break; _mid = _lower + (((_upper-_lower) >> 1) & ~1); if ( data[p] < _JSON_integer_trans_keys[_mid] ) _upper = _mid - 2; else if ( data[p] > _JSON_integer_trans_keys[_mid+1] ) _lower = _mid + 2; else { _trans += ((_mid - _keys)>>1); break _match; } } _trans += _klen; } } while (false); _trans = _JSON_integer_indicies[_trans]; cs = _JSON_integer_trans_targs[_trans]; if ( 
_JSON_integer_trans_actions[_trans] != 0 ) { _acts = _JSON_integer_trans_actions[_trans]; _nacts = (int) _JSON_integer_actions[_acts++]; while ( _nacts-- > 0 ) { switch ( _JSON_integer_actions[_acts++] ) { case 0: // line 466 "Parser.rl" { p--; { p += 1; _goto_targ = 5; if (true) continue _goto;} } break; // line 896 "Parser.java" } } } case 2: if ( cs == 0 ) { _goto_targ = 5; continue _goto; } if ( ++p != pe ) { _goto_targ = 1; continue _goto; } case 4: case 5: } break; } } // line 491 "Parser.rl" if (cs < JSON_integer_first_final) { return -1; } return p; } RubyInteger createInteger(int p, int new_p) { Ruby runtime = getRuntime(); ByteList num = absSubSequence(p, new_p); return bytesToInum(runtime, num); } RubyInteger bytesToInum(Ruby runtime, ByteList num) { return runtime.is1_9() ? ConvertBytes.byteListToInum19(runtime, num, 10, true) : ConvertBytes.byteListToInum(runtime, num, 10, true); } // line 938 "Parser.java" private static byte[] init__JSON_float_actions_0() { return new byte [] { 0, 1, 0 }; } private static final byte _JSON_float_actions[] = init__JSON_float_actions_0(); private static byte[] init__JSON_float_key_offsets_0() { return new byte [] { 0, 0, 4, 7, 10, 12, 16, 18, 23, 29, 29 }; } private static final byte _JSON_float_key_offsets[] = init__JSON_float_key_offsets_0(); private static char[] init__JSON_float_trans_keys_0() { return new char [] { 45, 48, 49, 57, 48, 49, 57, 46, 69, 101, 48, 57, 43, 45, 48, 57, 48, 57, 46, 69, 101, 48, 57, 69, 101, 45, 46, 48, 57, 69, 101, 45, 46, 48, 57, 0 }; } private static final char _JSON_float_trans_keys[] = init__JSON_float_trans_keys_0(); private static byte[] init__JSON_float_single_lengths_0() { return new byte [] { 0, 2, 1, 3, 0, 2, 0, 3, 2, 0, 2 }; } private static final byte _JSON_float_single_lengths[] = init__JSON_float_single_lengths_0(); private static byte[] init__JSON_float_range_lengths_0() { return new byte [] { 0, 1, 1, 0, 1, 1, 1, 1, 2, 0, 2 }; } private static final byte 
_JSON_float_range_lengths[] = init__JSON_float_range_lengths_0(); private static byte[] init__JSON_float_index_offsets_0() { return new byte [] { 0, 0, 4, 7, 11, 13, 17, 19, 24, 29, 30 }; } private static final byte _JSON_float_index_offsets[] = init__JSON_float_index_offsets_0(); private static byte[] init__JSON_float_indicies_0() { return new byte [] { 0, 2, 3, 1, 2, 3, 1, 4, 5, 5, 1, 6, 1, 7, 7, 8, 1, 8, 1, 4, 5, 5, 3, 1, 5, 5, 1, 6, 9, 1, 1, 1, 1, 8, 9, 0 }; } private static final byte _JSON_float_indicies[] = init__JSON_float_indicies_0(); private static byte[] init__JSON_float_trans_targs_0() { return new byte [] { 2, 0, 3, 7, 4, 5, 8, 6, 10, 9 }; } private static final byte _JSON_float_trans_targs[] = init__JSON_float_trans_targs_0(); private static byte[] init__JSON_float_trans_actions_0() { return new byte [] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 1 }; } private static final byte _JSON_float_trans_actions[] = init__JSON_float_trans_actions_0(); static final int JSON_float_start = 1; static final int JSON_float_first_final = 8; static final int JSON_float_error = 0; static final int JSON_float_en_main = 1; // line 526 "Parser.rl" void parseFloat(ParserResult res, int p, int pe) { int new_p = parseFloatInternal(p, pe); if (new_p == -1) { res.update(null, p); return; } IRubyObject number = parser.decimalClass == null ? 
createFloat(p, new_p) : createCustomDecimal(p, new_p); res.update(number, new_p + 1); return; } int parseFloatInternal(int p, int pe) { int cs = EVIL; // line 1060 "Parser.java" { cs = JSON_float_start; } // line 545 "Parser.rl" int memo = p; // line 1068 "Parser.java" { int _klen; int _trans = 0; int _acts; int _nacts; int _keys; int _goto_targ = 0; _goto: while (true) { switch ( _goto_targ ) { case 0: if ( p == pe ) { _goto_targ = 4; continue _goto; } if ( cs == 0 ) { _goto_targ = 5; continue _goto; } case 1: _match: do { _keys = _JSON_float_key_offsets[cs]; _trans = _JSON_float_index_offsets[cs]; _klen = _JSON_float_single_lengths[cs]; if ( _klen > 0 ) { int _lower = _keys; int _mid; int _upper = _keys + _klen - 1; while (true) { if ( _upper < _lower ) break; _mid = _lower + ((_upper-_lower) >> 1); if ( data[p] < _JSON_float_trans_keys[_mid] ) _upper = _mid - 1; else if ( data[p] > _JSON_float_trans_keys[_mid] ) _lower = _mid + 1; else { _trans += (_mid - _keys); break _match; } } _keys += _klen; _trans += _klen; } _klen = _JSON_float_range_lengths[cs]; if ( _klen > 0 ) { int _lower = _keys; int _mid; int _upper = _keys + (_klen<<1) - 2; while (true) { if ( _upper < _lower ) break; _mid = _lower + (((_upper-_lower) >> 1) & ~1); if ( data[p] < _JSON_float_trans_keys[_mid] ) _upper = _mid - 2; else if ( data[p] > _JSON_float_trans_keys[_mid+1] ) _lower = _mid + 2; else { _trans += ((_mid - _keys)>>1); break _match; } } _trans += _klen; } } while (false); _trans = _JSON_float_indicies[_trans]; cs = _JSON_float_trans_targs[_trans]; if ( _JSON_float_trans_actions[_trans] != 0 ) { _acts = _JSON_float_trans_actions[_trans]; _nacts = (int) _JSON_float_actions[_acts++]; while ( _nacts-- > 0 ) { switch ( _JSON_float_actions[_acts++] ) { case 0: // line 517 "Parser.rl" { p--; { p += 1; _goto_targ = 5; if (true) continue _goto;} } break; // line 1155 "Parser.java" } } } case 2: if ( cs == 0 ) { _goto_targ = 5; continue _goto; } if ( ++p != pe ) { _goto_targ = 1; continue 
_goto; } case 4: case 5: } break; } } // line 547 "Parser.rl" if (cs < JSON_float_first_final) { return -1; } return p; } RubyFloat createFloat(int p, int new_p) { Ruby runtime = getRuntime(); ByteList num = absSubSequence(p, new_p); return RubyFloat.newFloat(runtime, dc.parse(num, true, runtime.is1_9())); } IRubyObject createCustomDecimal(int p, int new_p) { Ruby runtime = getRuntime(); ByteList num = absSubSequence(p, new_p); IRubyObject numString = runtime.newString(num.toString()); return parser.decimalClass.callMethod(context, "new", numString); } // line 1198 "Parser.java" private static byte[] init__JSON_string_actions_0() { return new byte [] { 0, 2, 0, 1 }; } private static final byte _JSON_string_actions[] = init__JSON_string_actions_0(); private static byte[] init__JSON_string_key_offsets_0() { return new byte [] { 0, 0, 1, 5, 8, 14, 20, 26, 32 }; } private static final byte _JSON_string_key_offsets[] = init__JSON_string_key_offsets_0(); private static char[] init__JSON_string_trans_keys_0() { return new char [] { 34, 34, 92, 0, 31, 117, 0, 31, 48, 57, 65, 70, 97, 102, 48, 57, 65, 70, 97, 102, 48, 57, 65, 70, 97, 102, 48, 57, 65, 70, 97, 102, 0 }; } private static final char _JSON_string_trans_keys[] = init__JSON_string_trans_keys_0(); private static byte[] init__JSON_string_single_lengths_0() { return new byte [] { 0, 1, 2, 1, 0, 0, 0, 0, 0 }; } private static final byte _JSON_string_single_lengths[] = init__JSON_string_single_lengths_0(); private static byte[] init__JSON_string_range_lengths_0() { return new byte [] { 0, 0, 1, 1, 3, 3, 3, 3, 0 }; } private static final byte _JSON_string_range_lengths[] = init__JSON_string_range_lengths_0(); private static byte[] init__JSON_string_index_offsets_0() { return new byte [] { 0, 0, 2, 6, 9, 13, 17, 21, 25 }; } private static final byte _JSON_string_index_offsets[] = init__JSON_string_index_offsets_0(); private static byte[] init__JSON_string_indicies_0() { return new byte [] { 0, 1, 2, 3, 1, 0, 4, 1, 0, 5, 
5, 5, 1, 6, 6, 6, 1, 7, 7, 7, 1, 0, 0, 0, 1, 1, 0 }; } private static final byte _JSON_string_indicies[] = init__JSON_string_indicies_0(); private static byte[] init__JSON_string_trans_targs_0() { return new byte [] { 2, 0, 8, 3, 4, 5, 6, 7 }; } private static final byte _JSON_string_trans_targs[] = init__JSON_string_trans_targs_0(); private static byte[] init__JSON_string_trans_actions_0() { return new byte [] { 0, 0, 1, 0, 0, 0, 0, 0 }; } private static final byte _JSON_string_trans_actions[] = init__JSON_string_trans_actions_0(); static final int JSON_string_start = 1; static final int JSON_string_first_final = 8; static final int JSON_string_error = 0; static final int JSON_string_en_main = 1; // line 599 "Parser.rl" void parseString(ParserResult res, int p, int pe) { int cs = EVIL; IRubyObject result = null; // line 1308 "Parser.java" { cs = JSON_string_start; } // line 606 "Parser.rl" int memo = p; // line 1316 "Parser.java" { int _klen; int _trans = 0; int _acts; int _nacts; int _keys; int _goto_targ = 0; _goto: while (true) { switch ( _goto_targ ) { case 0: if ( p == pe ) { _goto_targ = 4; continue _goto; } if ( cs == 0 ) { _goto_targ = 5; continue _goto; } case 1: _match: do { _keys = _JSON_string_key_offsets[cs]; _trans = _JSON_string_index_offsets[cs]; _klen = _JSON_string_single_lengths[cs]; if ( _klen > 0 ) { int _lower = _keys; int _mid; int _upper = _keys + _klen - 1; while (true) { if ( _upper < _lower ) break; _mid = _lower + ((_upper-_lower) >> 1); if ( data[p] < _JSON_string_trans_keys[_mid] ) _upper = _mid - 1; else if ( data[p] > _JSON_string_trans_keys[_mid] ) _lower = _mid + 1; else { _trans += (_mid - _keys); break _match; } } _keys += _klen; _trans += _klen; } _klen = _JSON_string_range_lengths[cs]; if ( _klen > 0 ) { int _lower = _keys; int _mid; int _upper = _keys + (_klen<<1) - 2; while (true) { if ( _upper < _lower ) break; _mid = _lower + (((_upper-_lower) >> 1) & ~1); if ( data[p] < _JSON_string_trans_keys[_mid] ) _upper = _mid - 2; 
else if ( data[p] > _JSON_string_trans_keys[_mid+1] ) _lower = _mid + 2; else { _trans += ((_mid - _keys)>>1); break _match; } } _trans += _klen; } } while (false); _trans = _JSON_string_indicies[_trans]; cs = _JSON_string_trans_targs[_trans]; if ( _JSON_string_trans_actions[_trans] != 0 ) { _acts = _JSON_string_trans_actions[_trans]; _nacts = (int) _JSON_string_actions[_acts++]; while ( _nacts-- > 0 ) { switch ( _JSON_string_actions[_acts++] ) { case 0: // line 574 "Parser.rl" { int offset = byteList.begin(); ByteList decoded = decoder.decode(byteList, memo + 1 - offset, p - offset); result = getRuntime().newString(decoded); if (result == null) { p--; { p += 1; _goto_targ = 5; if (true) continue _goto;} } else { {p = (( p + 1))-1;} } } break; case 1: // line 587 "Parser.rl" { p--; { p += 1; _goto_targ = 5; if (true) continue _goto;} } break; // line 1418 "Parser.java" } } } case 2: if ( cs == 0 ) { _goto_targ = 5; continue _goto; } if ( ++p != pe ) { _goto_targ = 1; continue _goto; } case 4: case 5: } break; } } // line 608 "Parser.rl" if (parser.createAdditions) { RubyHash matchString = parser.matchString; if (matchString != null) { final IRubyObject[] memoArray = { result, null }; try { matchString.visitAll(new RubyHash.Visitor() { @Override public void visit(IRubyObject pattern, IRubyObject klass) { if (pattern.callMethod(context, "===", memoArray[0]).isTrue()) { memoArray[1] = klass; throw JumpException.SPECIAL_JUMP; } } }); } catch (JumpException e) { } if (memoArray[1] != null) { RubyClass klass = (RubyClass) memoArray[1]; if (klass.respondsTo("json_creatable?") && klass.callMethod(context, "json_creatable?").isTrue()) { result = klass.callMethod(context, "json_create", result); } } } } if (cs >= JSON_string_first_final && result != null) { if (result instanceof RubyString) { ((RubyString)result).force_encoding(context, info.utf8.get()); } res.update(result, p + 1); } else { res.update(null, p + 1); } } // line 1476 "Parser.java" private static byte[] 
init__JSON_array_actions_0() { return new byte [] { 0, 1, 0, 1, 1 }; } private static final byte _JSON_array_actions[] = init__JSON_array_actions_0(); private static byte[] init__JSON_array_key_offsets_0() { return new byte [] { 0, 0, 1, 18, 25, 41, 43, 44, 46, 47, 49, 50, 52, 53, 55, 56, 58, 59 }; } private static final byte _JSON_array_key_offsets[] = init__JSON_array_key_offsets_0(); private static char[] init__JSON_array_trans_keys_0() { return new char [] { 91, 13, 32, 34, 45, 47, 73, 78, 91, 93, 102, 110, 116, 123, 9, 10, 48, 57, 13, 32, 44, 47, 93, 9, 10, 13, 32, 34, 45, 47, 73, 78, 91, 102, 110, 116, 123, 9, 10, 48, 57, 42, 47, 42, 42, 47, 10, 42, 47, 42, 42, 47, 10, 42, 47, 42, 42, 47, 10, 0 }; } private static final char _JSON_array_trans_keys[] = init__JSON_array_trans_keys_0(); private static byte[] init__JSON_array_single_lengths_0() { return new byte [] { 0, 1, 13, 5, 12, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 0 }; } private static final byte _JSON_array_single_lengths[] = init__JSON_array_single_lengths_0(); private static byte[] init__JSON_array_range_lengths_0() { return new byte [] { 0, 0, 2, 1, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }; } private static final byte _JSON_array_range_lengths[] = init__JSON_array_range_lengths_0(); private static byte[] init__JSON_array_index_offsets_0() { return new byte [] { 0, 0, 2, 18, 25, 40, 43, 45, 48, 50, 53, 55, 58, 60, 63, 65, 68, 70 }; } private static final byte _JSON_array_index_offsets[] = init__JSON_array_index_offsets_0(); private static byte[] init__JSON_array_indicies_0() { return new byte [] { 0, 1, 0, 0, 2, 2, 3, 2, 2, 2, 4, 2, 2, 2, 2, 0, 2, 1, 5, 5, 6, 7, 4, 5, 1, 6, 6, 2, 2, 8, 2, 2, 2, 2, 2, 2, 2, 6, 2, 1, 9, 10, 1, 11, 9, 11, 6, 9, 6, 10, 12, 13, 1, 14, 12, 14, 5, 12, 5, 13, 15, 16, 1, 17, 15, 17, 0, 15, 0, 16, 1, 0 }; } private static final byte _JSON_array_indicies[] = init__JSON_array_indicies_0(); private static byte[] init__JSON_array_trans_targs_0() { return new byte [] { 2, 0, 3, 13, 
17, 3, 4, 9, 5, 6, 8, 7, 10, 12, 11, 14, 16, 15 }; } private static final byte _JSON_array_trans_targs[] = init__JSON_array_trans_targs_0(); private static byte[] init__JSON_array_trans_actions_0() { return new byte [] { 0, 0, 1, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }; } private static final byte _JSON_array_trans_actions[] = init__JSON_array_trans_actions_0(); static final int JSON_array_start = 1; static final int JSON_array_first_final = 17; static final int JSON_array_error = 0; static final int JSON_array_en_main = 1; // line 681 "Parser.rl" void parseArray(ParserResult res, int p, int pe) { int cs = EVIL; if (parser.maxNesting > 0 && currentNesting > parser.maxNesting) { throw newException(Utils.M_NESTING_ERROR, "nesting of " + currentNesting + " is too deep"); } IRubyObject result; if (parser.arrayClass == getRuntime().getArray()) { result = RubyArray.newArray(getRuntime()); } else { result = parser.arrayClass.newInstance(context, IRubyObject.NULL_ARRAY, Block.NULL_BLOCK); } // line 1609 "Parser.java" { cs = JSON_array_start; } // line 700 "Parser.rl" // line 1616 "Parser.java" { int _klen; int _trans = 0; int _acts; int _nacts; int _keys; int _goto_targ = 0; _goto: while (true) { switch ( _goto_targ ) { case 0: if ( p == pe ) { _goto_targ = 4; continue _goto; } if ( cs == 0 ) { _goto_targ = 5; continue _goto; } case 1: _match: do { _keys = _JSON_array_key_offsets[cs]; _trans = _JSON_array_index_offsets[cs]; _klen = _JSON_array_single_lengths[cs]; if ( _klen > 0 ) { int _lower = _keys; int _mid; int _upper = _keys + _klen - 1; while (true) { if ( _upper < _lower ) break; _mid = _lower + ((_upper-_lower) >> 1); if ( data[p] < _JSON_array_trans_keys[_mid] ) _upper = _mid - 1; else if ( data[p] > _JSON_array_trans_keys[_mid] ) _lower = _mid + 1; else { _trans += (_mid - _keys); break _match; } } _keys += _klen; _trans += _klen; } _klen = _JSON_array_range_lengths[cs]; if ( _klen > 0 ) { int _lower = _keys; int _mid; int _upper = _keys + (_klen<<1) - 2; 
while (true) { if ( _upper < _lower ) break; _mid = _lower + (((_upper-_lower) >> 1) & ~1); if ( data[p] < _JSON_array_trans_keys[_mid] ) _upper = _mid - 2; else if ( data[p] > _JSON_array_trans_keys[_mid+1] ) _lower = _mid + 2; else { _trans += ((_mid - _keys)>>1); break _match; } } _trans += _klen; } } while (false); _trans = _JSON_array_indicies[_trans]; cs = _JSON_array_trans_targs[_trans]; if ( _JSON_array_trans_actions[_trans] != 0 ) { _acts = _JSON_array_trans_actions[_trans]; _nacts = (int) _JSON_array_actions[_acts++]; while ( _nacts-- > 0 ) { switch ( _JSON_array_actions[_acts++] ) { case 0: // line 650 "Parser.rl" { parseValue(res, p, pe); if (res.result == null) { p--; { p += 1; _goto_targ = 5; if (true) continue _goto;} } else { if (parser.arrayClass == getRuntime().getArray()) { ((RubyArray)result).append(res.result); } else { result.callMethod(context, "<<", res.result); } {p = (( res.p))-1;} } } break; case 1: // line 665 "Parser.rl" { p--; { p += 1; _goto_targ = 5; if (true) continue _goto;} } break; // line 1720 "Parser.java" } } } case 2: if ( cs == 0 ) { _goto_targ = 5; continue _goto; } if ( ++p != pe ) { _goto_targ = 1; continue _goto; } case 4: case 5: } break; } } // line 701 "Parser.rl" if (cs >= JSON_array_first_final) { res.update(result, p + 1); } else { throw unexpectedToken(p, pe); } } // line 1750 "Parser.java" private static byte[] init__JSON_object_actions_0() { return new byte [] { 0, 1, 0, 1, 1, 1, 2 }; } private static final byte _JSON_object_actions[] = init__JSON_object_actions_0(); private static byte[] init__JSON_object_key_offsets_0() { return new byte [] { 0, 0, 1, 8, 14, 16, 17, 19, 20, 36, 43, 49, 51, 52, 54, 55, 57, 58, 60, 61, 63, 64, 66, 67, 69, 70, 72, 73 }; } private static final byte _JSON_object_key_offsets[] = init__JSON_object_key_offsets_0(); private static char[] init__JSON_object_trans_keys_0() { return new char [] { 123, 13, 32, 34, 47, 125, 9, 10, 13, 32, 47, 58, 9, 10, 42, 47, 42, 42, 47, 10, 13, 32, 34, 
45, 47, 73, 78, 91, 102, 110, 116, 123, 9, 10, 48, 57, 13, 32, 44, 47, 125, 9, 10, 13, 32, 34, 47, 9, 10, 42, 47, 42, 42, 47, 10, 42, 47, 42, 42, 47, 10, 42, 47, 42, 42, 47, 10, 42, 47, 42, 42, 47, 10, 0 }; } private static final char _JSON_object_trans_keys[] = init__JSON_object_trans_keys_0(); private static byte[] init__JSON_object_single_lengths_0() { return new byte [] { 0, 1, 5, 4, 2, 1, 2, 1, 12, 5, 4, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 0 }; } private static final byte _JSON_object_single_lengths[] = init__JSON_object_single_lengths_0(); private static byte[] init__JSON_object_range_lengths_0() { return new byte [] { 0, 0, 1, 1, 0, 0, 0, 0, 2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }; } private static final byte _JSON_object_range_lengths[] = init__JSON_object_range_lengths_0(); private static byte[] init__JSON_object_index_offsets_0() { return new byte [] { 0, 0, 2, 9, 15, 18, 20, 23, 25, 40, 47, 53, 56, 58, 61, 63, 66, 68, 71, 73, 76, 78, 81, 83, 86, 88, 91, 93 }; } private static final byte _JSON_object_index_offsets[] = init__JSON_object_index_offsets_0(); private static byte[] init__JSON_object_indicies_0() { return new byte [] { 0, 1, 0, 0, 2, 3, 4, 0, 1, 5, 5, 6, 7, 5, 1, 8, 9, 1, 10, 8, 10, 5, 8, 5, 9, 7, 7, 11, 11, 12, 11, 11, 11, 11, 11, 11, 11, 7, 11, 1, 13, 13, 14, 15, 4, 13, 1, 14, 14, 2, 16, 14, 1, 17, 18, 1, 19, 17, 19, 14, 17, 14, 18, 20, 21, 1, 22, 20, 22, 13, 20, 13, 21, 23, 24, 1, 25, 23, 25, 7, 23, 7, 24, 26, 27, 1, 28, 26, 28, 0, 26, 0, 27, 1, 0 }; } private static final byte _JSON_object_indicies[] = init__JSON_object_indicies_0(); private static byte[] init__JSON_object_trans_targs_0() { return new byte [] { 2, 0, 3, 23, 27, 3, 4, 8, 5, 7, 6, 9, 19, 9, 10, 15, 11, 12, 14, 13, 16, 18, 17, 20, 22, 21, 24, 26, 25 }; } private static final byte _JSON_object_trans_targs[] = init__JSON_object_trans_targs_0(); private static byte[] init__JSON_object_trans_actions_0() { return new byte [] { 0, 0, 3, 0, 5, 0, 0, 0, 
0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }; } private static final byte _JSON_object_trans_actions[] = init__JSON_object_trans_actions_0(); static final int JSON_object_start = 1; static final int JSON_object_first_final = 27; static final int JSON_object_error = 0; static final int JSON_object_en_main = 1; // line 760 "Parser.rl" void parseObject(ParserResult res, int p, int pe) { int cs = EVIL; IRubyObject lastName = null; boolean objectDefault = true; if (parser.maxNesting > 0 && currentNesting > parser.maxNesting) { throw newException(Utils.M_NESTING_ERROR, "nesting of " + currentNesting + " is too deep"); } // this is guaranteed to be a RubyHash due to the earlier // allocator test at OptionsReader#getClass IRubyObject result; if (parser.objectClass == getRuntime().getHash()) { result = RubyHash.newHash(getRuntime()); } else { objectDefault = false; result = parser.objectClass.newInstance(context, IRubyObject.NULL_ARRAY, Block.NULL_BLOCK); } // line 1898 "Parser.java" { cs = JSON_object_start; } // line 784 "Parser.rl" // line 1905 "Parser.java" { int _klen; int _trans = 0; int _acts; int _nacts; int _keys; int _goto_targ = 0; _goto: while (true) { switch ( _goto_targ ) { case 0: if ( p == pe ) { _goto_targ = 4; continue _goto; } if ( cs == 0 ) { _goto_targ = 5; continue _goto; } case 1: _match: do { _keys = _JSON_object_key_offsets[cs]; _trans = _JSON_object_index_offsets[cs]; _klen = _JSON_object_single_lengths[cs]; if ( _klen > 0 ) { int _lower = _keys; int _mid; int _upper = _keys + _klen - 1; while (true) { if ( _upper < _lower ) break; _mid = _lower + ((_upper-_lower) >> 1); if ( data[p] < _JSON_object_trans_keys[_mid] ) _upper = _mid - 1; else if ( data[p] > _JSON_object_trans_keys[_mid] ) _lower = _mid + 1; else { _trans += (_mid - _keys); break _match; } } _keys += _klen; _trans += _klen; } _klen = _JSON_object_range_lengths[cs]; if ( _klen > 0 ) { int _lower = _keys; int _mid; int _upper = _keys + (_klen<<1) - 2; while (true) { 
if ( _upper < _lower ) break; _mid = _lower + (((_upper-_lower) >> 1) & ~1); if ( data[p] < _JSON_object_trans_keys[_mid] ) _upper = _mid - 2; else if ( data[p] > _JSON_object_trans_keys[_mid+1] ) _lower = _mid + 2; else { _trans += ((_mid - _keys)>>1); break _match; } } _trans += _klen; } } while (false); _trans = _JSON_object_indicies[_trans]; cs = _JSON_object_trans_targs[_trans]; if ( _JSON_object_trans_actions[_trans] != 0 ) { _acts = _JSON_object_trans_actions[_trans]; _nacts = (int) _JSON_object_actions[_acts++]; while ( _nacts-- > 0 ) { switch ( _JSON_object_actions[_acts++] ) { case 0: // line 715 "Parser.rl" { parseValue(res, p, pe); if (res.result == null) { p--; { p += 1; _goto_targ = 5; if (true) continue _goto;} } else { if (parser.objectClass == getRuntime().getHash()) { ((RubyHash)result).op_aset(context, lastName, res.result); } else { result.callMethod(context, "[]=", new IRubyObject[] { lastName, res.result }); } {p = (( res.p))-1;} } } break; case 1: // line 730 "Parser.rl" { parseString(res, p, pe); if (res.result == null) { p--; { p += 1; _goto_targ = 5; if (true) continue _goto;} } else { RubyString name = (RubyString)res.result; if (parser.symbolizeNames) { lastName = context.getRuntime().is1_9() ? 
name.intern19() : name.intern(); } else { lastName = name; } {p = (( res.p))-1;} } } break; case 2: // line 748 "Parser.rl" { p--; { p += 1; _goto_targ = 5; if (true) continue _goto;} } break; // line 2029 "Parser.java" } } } case 2: if ( cs == 0 ) { _goto_targ = 5; continue _goto; } if ( ++p != pe ) { _goto_targ = 1; continue _goto; } case 4: case 5: } break; } } // line 785 "Parser.rl" if (cs < JSON_object_first_final) { res.update(null, p + 1); return; } IRubyObject returnedResult = result; // attempt to de-serialize object if (parser.createAdditions) { IRubyObject vKlassName; if (objectDefault) { vKlassName = ((RubyHash)result).op_aref(context, parser.createId); } else { vKlassName = result.callMethod(context, "[]", parser.createId); } if (!vKlassName.isNil()) { // might throw ArgumentError, we let it propagate IRubyObject klass = parser.info.jsonModule.get(). callMethod(context, "deep_const_get", vKlassName); if (klass.respondsTo("json_creatable?") && klass.callMethod(context, "json_creatable?").isTrue()) { returnedResult = klass.callMethod(context, "json_create", result); } } } res.update(returnedResult, p + 1); } // line 2082 "Parser.java" private static byte[] init__JSON_actions_0() { return new byte [] { 0, 1, 0 }; } private static final byte _JSON_actions[] = init__JSON_actions_0(); private static byte[] init__JSON_key_offsets_0() { return new byte [] { 0, 0, 16, 18, 19, 21, 22, 24, 25, 27, 28 }; } private static final byte _JSON_key_offsets[] = init__JSON_key_offsets_0(); private static char[] init__JSON_trans_keys_0() { return new char [] { 13, 32, 34, 45, 47, 73, 78, 91, 102, 110, 116, 123, 9, 10, 48, 57, 42, 47, 42, 42, 47, 10, 42, 47, 42, 42, 47, 10, 13, 32, 47, 9, 10, 0 }; } private static final char _JSON_trans_keys[] = init__JSON_trans_keys_0(); private static byte[] init__JSON_single_lengths_0() { return new byte [] { 0, 12, 2, 1, 2, 1, 2, 1, 2, 1, 3 }; } private static final byte _JSON_single_lengths[] = init__JSON_single_lengths_0(); private 
static byte[] init__JSON_range_lengths_0() { return new byte [] { 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 1 }; } private static final byte _JSON_range_lengths[] = init__JSON_range_lengths_0(); private static byte[] init__JSON_index_offsets_0() { return new byte [] { 0, 0, 15, 18, 20, 23, 25, 28, 30, 33, 35 }; } private static final byte _JSON_index_offsets[] = init__JSON_index_offsets_0(); private static byte[] init__JSON_indicies_0() { return new byte [] { 0, 0, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 0, 2, 1, 4, 5, 1, 6, 4, 6, 7, 4, 7, 5, 8, 9, 1, 10, 8, 10, 0, 8, 0, 9, 7, 7, 11, 7, 1, 0 }; } private static final byte _JSON_indicies[] = init__JSON_indicies_0(); private static byte[] init__JSON_trans_targs_0() { return new byte [] { 1, 0, 10, 6, 3, 5, 4, 10, 7, 9, 8, 2 }; } private static final byte _JSON_trans_targs[] = init__JSON_trans_targs_0(); private static byte[] init__JSON_trans_actions_0() { return new byte [] { 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0 }; } private static final byte _JSON_trans_actions[] = init__JSON_trans_actions_0(); static final int JSON_start = 1; static final int JSON_first_final = 10; static final int JSON_error = 0; static final int JSON_en_main = 1; // line 836 "Parser.rl" public IRubyObject parseImplemetation() { int cs = EVIL; int p, pe; IRubyObject result = null; ParserResult res = new ParserResult(); // line 2195 "Parser.java" { cs = JSON_start; } // line 845 "Parser.rl" p = byteList.begin(); pe = p + byteList.length(); // line 2204 "Parser.java" { int _klen; int _trans = 0; int _acts; int _nacts; int _keys; int _goto_targ = 0; _goto: while (true) { switch ( _goto_targ ) { case 0: if ( p == pe ) { _goto_targ = 4; continue _goto; } if ( cs == 0 ) { _goto_targ = 5; continue _goto; } case 1: _match: do { _keys = _JSON_key_offsets[cs]; _trans = _JSON_index_offsets[cs]; _klen = _JSON_single_lengths[cs]; if ( _klen > 0 ) { int _lower = _keys; int _mid; int _upper = _keys + _klen - 1; while (true) { if ( _upper < _lower ) break; _mid = _lower + 
((_upper-_lower) >> 1); if ( data[p] < _JSON_trans_keys[_mid] ) _upper = _mid - 1; else if ( data[p] > _JSON_trans_keys[_mid] ) _lower = _mid + 1; else { _trans += (_mid - _keys); break _match; } } _keys += _klen; _trans += _klen; } _klen = _JSON_range_lengths[cs]; if ( _klen > 0 ) { int _lower = _keys; int _mid; int _upper = _keys + (_klen<<1) - 2; while (true) { if ( _upper < _lower ) break; _mid = _lower + (((_upper-_lower) >> 1) & ~1); if ( data[p] < _JSON_trans_keys[_mid] ) _upper = _mid - 2; else if ( data[p] > _JSON_trans_keys[_mid+1] ) _lower = _mid + 2; else { _trans += ((_mid - _keys)>>1); break _match; } } _trans += _klen; } } while (false); _trans = _JSON_indicies[_trans]; cs = _JSON_trans_targs[_trans]; if ( _JSON_trans_actions[_trans] != 0 ) { _acts = _JSON_trans_actions[_trans]; _nacts = (int) _JSON_actions[_acts++]; while ( _nacts-- > 0 ) { switch ( _JSON_actions[_acts++] ) { case 0: // line 822 "Parser.rl" { parseValue(res, p, pe); if (res.result == null) { p--; { p += 1; _goto_targ = 5; if (true) continue _goto;} } else { result = res.result; {p = (( res.p))-1;} } } break; // line 2297 "Parser.java" } } } case 2: if ( cs == 0 ) { _goto_targ = 5; continue _goto; } if ( ++p != pe ) { _goto_targ = 1; continue _goto; } case 4: case 5: } break; } } // line 848 "Parser.rl" if (cs >= JSON_first_final && p == pe) { return result; } else { throw unexpectedToken(p, pe); } } public IRubyObject parse() { return parseImplemetation(); } /** * Updates the "view" bytelist with the new offsets and returns it. * @param start * @param end */ private ByteList absSubSequence(int absStart, int absEnd) { view.setBegin(absStart); view.setRealSize(absEnd - absStart); return view; } /** * Retrieves a constant directly descended from the JSON module. 
* @param name The constant name */ private IRubyObject getConstant(String name) { return parser.info.jsonModule.get().getConstant(name); } private RaiseException newException(String className, String message) { return Utils.newException(context, className, message); } private RaiseException newException(String className, RubyString message) { return Utils.newException(context, className, message); } private RaiseException newException(String className, String messageBegin, ByteList messageEnd) { return newException(className, getRuntime().newString(messageBegin).cat(messageEnd)); } } } ruby-json-2.1.0+dfsg.orig/java/src/json/ext/OptionsReader.java0000644000175000017500000000766313113111601023600 0ustar boutilboutil/* * This code is copyrighted work by Daniel Luz . * * Distributed under the Ruby license: https://www.ruby-lang.org/en/about/license.txt */ package json.ext; import org.jruby.Ruby; import org.jruby.RubyClass; import org.jruby.RubyHash; import org.jruby.RubyNumeric; import org.jruby.RubyString; import org.jruby.runtime.ThreadContext; import org.jruby.runtime.builtin.IRubyObject; import org.jruby.util.ByteList; final class OptionsReader { private final ThreadContext context; private final Ruby runtime; private final RubyHash opts; private RuntimeInfo info; OptionsReader(ThreadContext context, IRubyObject vOpts) { this.context = context; this.runtime = context.getRuntime(); if (vOpts == null || vOpts.isNil()) { opts = null; } else if (vOpts.respondsTo("to_hash")) { opts = vOpts.convertToHash(); } else if (vOpts.respondsTo("to_h")) { opts = vOpts.callMethod(context, "to_h").convertToHash(); } else { opts = vOpts.convertToHash(); /* Should just raise the correct TypeError */ } } private RuntimeInfo getRuntimeInfo() { if (info != null) return info; info = RuntimeInfo.forRuntime(runtime); return info; } /** * Efficiently looks up items with a {@link RubySymbol Symbol} key * @param key The Symbol name to look up for * @return The item in the {@link RubyHash 
Hash}, or null * if not found */ IRubyObject get(String key) { return opts == null ? null : opts.fastARef(runtime.newSymbol(key)); } boolean getBool(String key, boolean defaultValue) { IRubyObject value = get(key); return value == null ? defaultValue : value.isTrue(); } int getInt(String key, int defaultValue) { IRubyObject value = get(key); if (value == null) return defaultValue; if (!value.isTrue()) return 0; return RubyNumeric.fix2int(value); } /** * Reads the setting from the options hash. If no entry is set for this * key or if it evaluates to false, returns null; attempts to * coerce the value to {@link RubyString String} otherwise. * @param key The Symbol name to look up for * @return null if the key is not in the Hash or if * its value evaluates to false * @throws RaiseException TypeError if the value does not * evaluate to false and can't be * converted to string */ ByteList getString(String key) { RubyString str = getString(key, null); return str == null ? null : str.getByteList().dup(); } RubyString getString(String key, RubyString defaultValue) { IRubyObject value = get(key); if (value == null || !value.isTrue()) return defaultValue; RubyString str = value.convertToString(); RuntimeInfo info = getRuntimeInfo(); if (str.encoding(context) != info.utf8.get()) { str = (RubyString)str.encode(context, info.utf8.get()); } return str; } /** * Reads the setting from the options hash. If it is nil or * undefined, returns the default value given. * If not, ensures it is a RubyClass instance and shares the same * allocator as the default value (i.e. for the basic types which have * their specific allocators, this ensures the passed value is * a subclass of them). 
*/ RubyClass getClass(String key, RubyClass defaultValue) { IRubyObject value = get(key); if (value == null || value.isNil()) return defaultValue; return (RubyClass)value; } public RubyHash getHash(String key) { IRubyObject value = get(key); if (value == null || value.isNil()) return new RubyHash(runtime); return (RubyHash) value; } } ruby-json-2.1.0+dfsg.orig/java/src/json/ext/GeneratorService.java0000644000175000017500000000304413113111601024256 0ustar boutilboutil/* * This code is copyrighted work by Daniel Luz . * * Distributed under the Ruby license: https://www.ruby-lang.org/en/about/license.txt */ package json.ext; import java.io.IOException; import java.lang.ref.WeakReference; import org.jruby.Ruby; import org.jruby.RubyClass; import org.jruby.RubyModule; import org.jruby.runtime.load.BasicLibraryService; /** * The service invoked by JRuby's {@link org.jruby.runtime.load.LoadService LoadService}. * Defines the JSON::Ext::Generator module. * @author mernen */ public class GeneratorService implements BasicLibraryService { public boolean basicLoad(Ruby runtime) throws IOException { runtime.getLoadService().require("json/common"); RuntimeInfo info = RuntimeInfo.initRuntime(runtime); info.jsonModule = new WeakReference(runtime.defineModule("JSON")); RubyModule jsonExtModule = info.jsonModule.get().defineModuleUnder("Ext"); RubyModule generatorModule = jsonExtModule.defineModuleUnder("Generator"); RubyClass stateClass = generatorModule.defineClassUnder("State", runtime.getObject(), GeneratorState.ALLOCATOR); stateClass.defineAnnotatedMethods(GeneratorState.class); info.generatorStateClass = new WeakReference(stateClass); RubyModule generatorMethods = generatorModule.defineModuleUnder("GeneratorMethods"); GeneratorMethods.populate(info, generatorMethods); return true; } } ruby-json-2.1.0+dfsg.orig/java/src/json/ext/GeneratorState.java0000644000175000017500000004235513113111601023746 0ustar boutilboutil/* * This code is copyrighted work by Daniel Luz . 
* * Distributed under the Ruby license: https://www.ruby-lang.org/en/about/license.txt */ package json.ext; import org.jruby.Ruby; import org.jruby.RubyBoolean; import org.jruby.RubyClass; import org.jruby.RubyHash; import org.jruby.RubyInteger; import org.jruby.RubyNumeric; import org.jruby.RubyObject; import org.jruby.RubyString; import org.jruby.anno.JRubyMethod; import org.jruby.runtime.Block; import org.jruby.runtime.ObjectAllocator; import org.jruby.runtime.ThreadContext; import org.jruby.runtime.Visibility; import org.jruby.runtime.builtin.IRubyObject; import org.jruby.util.ByteList; /** * The JSON::Ext::Generator::State class. * *

This class is used to create State instances, that are use to hold data * while generating a JSON text from a a Ruby data structure. * * @author mernen */ public class GeneratorState extends RubyObject { /** * The indenting unit string. Will be repeated several times for larger * indenting levels. */ private ByteList indent = ByteList.EMPTY_BYTELIST; /** * The spacing to be added after a semicolon on a JSON object. * @see #spaceBefore */ private ByteList space = ByteList.EMPTY_BYTELIST; /** * The spacing to be added before a semicolon on a JSON object. * @see #space */ private ByteList spaceBefore = ByteList.EMPTY_BYTELIST; /** * Any suffix to be added after the comma for each element on a JSON object. * It is assumed to be a newline, if set. */ private ByteList objectNl = ByteList.EMPTY_BYTELIST; /** * Any suffix to be added after the comma for each element on a JSON Array. * It is assumed to be a newline, if set. */ private ByteList arrayNl = ByteList.EMPTY_BYTELIST; /** * The maximum level of nesting of structures allowed. * 0 means disabled. */ private int maxNesting = DEFAULT_MAX_NESTING; static final int DEFAULT_MAX_NESTING = 100; /** * Whether special float values (NaN, Infinity, * -Infinity) are accepted. * If set to false, an exception will be thrown upon * encountering one. */ private boolean allowNaN = DEFAULT_ALLOW_NAN; static final boolean DEFAULT_ALLOW_NAN = false; /** * If set to true all JSON documents generated do not contain * any other characters than ASCII characters. */ private boolean asciiOnly = DEFAULT_ASCII_ONLY; static final boolean DEFAULT_ASCII_ONLY = false; /** * If set to true all JSON values generated might not be * RFC-conform JSON documents. */ private boolean quirksMode = DEFAULT_QUIRKS_MODE; static final boolean DEFAULT_QUIRKS_MODE = false; /** * The initial buffer length of this state. (This isn't really used on all * non-C implementations.) 
*/ private int bufferInitialLength = DEFAULT_BUFFER_INITIAL_LENGTH; static final int DEFAULT_BUFFER_INITIAL_LENGTH = 1024; /** * The current depth (inside a #to_json call) */ private int depth = 0; static final ObjectAllocator ALLOCATOR = new ObjectAllocator() { public IRubyObject allocate(Ruby runtime, RubyClass klazz) { return new GeneratorState(runtime, klazz); } }; public GeneratorState(Ruby runtime, RubyClass metaClass) { super(runtime, metaClass); } /** * State.from_state(opts) * *

Creates a State object from opts, which ought to be * {@link RubyHash Hash} to create a new State instance * configured by opts, something else to create an * unconfigured instance. If opts is a State * object, it is just returned. * @param clazzParam The receiver of the method call * ({@link RubyClass} State) * @param opts The object to use as a base for the new State * @param block The block passed to the method * @return A GeneratorState as determined above */ @JRubyMethod(meta=true) public static IRubyObject from_state(ThreadContext context, IRubyObject klass, IRubyObject opts) { return fromState(context, opts); } static GeneratorState fromState(ThreadContext context, IRubyObject opts) { return fromState(context, RuntimeInfo.forRuntime(context.getRuntime()), opts); } static GeneratorState fromState(ThreadContext context, RuntimeInfo info, IRubyObject opts) { RubyClass klass = info.generatorStateClass.get(); if (opts != null) { // if the given parameter is a Generator::State, return itself if (klass.isInstance(opts)) return (GeneratorState)opts; // if the given parameter is a Hash, pass it to the instantiator if (context.getRuntime().getHash().isInstance(opts)) { return (GeneratorState)klass.newInstance(context, new IRubyObject[] {opts}, Block.NULL_BLOCK); } } // for other values, return the safe prototype return (GeneratorState)info.getSafeStatePrototype(context).dup(); } /** * State#initialize(opts = {}) * * Instantiates a new State object, configured by opts. * * opts can have the following keys: * *

*
:indent *
a {@link RubyString String} used to indent levels (default: "") *
:space *
a String that is put after a ':' or ',' * delimiter (default: "") *
:space_before *
a String that is put before a ":" pair delimiter * (default: "") *
:object_nl *
a String that is put at the end of a JSON object (default: "") *
:array_nl *
a String that is put at the end of a JSON array (default: "") *
:allow_nan *
true if NaN, Infinity, and * -Infinity should be generated, otherwise an exception is * thrown if these values are encountered. * This options defaults to false. */ @JRubyMethod(optional=1, visibility=Visibility.PRIVATE) public IRubyObject initialize(ThreadContext context, IRubyObject[] args) { configure(context, args.length > 0 ? args[0] : null); return this; } @JRubyMethod public IRubyObject initialize_copy(ThreadContext context, IRubyObject vOrig) { Ruby runtime = context.getRuntime(); if (!(vOrig instanceof GeneratorState)) { throw runtime.newTypeError(vOrig, getType()); } GeneratorState orig = (GeneratorState)vOrig; this.indent = orig.indent; this.space = orig.space; this.spaceBefore = orig.spaceBefore; this.objectNl = orig.objectNl; this.arrayNl = orig.arrayNl; this.maxNesting = orig.maxNesting; this.allowNaN = orig.allowNaN; this.asciiOnly = orig.asciiOnly; this.quirksMode = orig.quirksMode; this.bufferInitialLength = orig.bufferInitialLength; this.depth = orig.depth; return this; } /** * Generates a valid JSON document from object obj and returns * the result. If no valid JSON document can be created this method raises * a GeneratorError exception. 
*/ @JRubyMethod public IRubyObject generate(ThreadContext context, IRubyObject obj) { RubyString result = Generator.generateJson(context, obj, this); RuntimeInfo info = RuntimeInfo.forRuntime(context.getRuntime()); result.force_encoding(context, info.utf8.get()); return result; } private static boolean matchClosingBrace(ByteList bl, int pos, int len, int brace) { for (int endPos = len - 1; endPos > pos; endPos--) { int b = bl.get(endPos); if (Character.isWhitespace(b)) continue; return b == brace; } return false; } @JRubyMethod(name="[]", required=1) public IRubyObject op_aref(ThreadContext context, IRubyObject vName) { String name = vName.asJavaString(); if (getMetaClass().isMethodBound(name, true)) { return send(context, vName, Block.NULL_BLOCK); } else { IRubyObject value = getInstanceVariables().getInstanceVariable("@" + name); return value == null ? context.nil : value; } } @JRubyMethod(name="[]=", required=2) public IRubyObject op_aset(ThreadContext context, IRubyObject vName, IRubyObject value) { String name = vName.asJavaString(); String nameWriter = name + "="; if (getMetaClass().isMethodBound(nameWriter, true)) { return send(context, context.getRuntime().newString(nameWriter), value, Block.NULL_BLOCK); } else { getInstanceVariables().setInstanceVariable("@" + name, value); } return context.getRuntime().getNil(); } public ByteList getIndent() { return indent; } @JRubyMethod(name="indent") public RubyString indent_get(ThreadContext context) { return context.getRuntime().newString(indent); } @JRubyMethod(name="indent=") public IRubyObject indent_set(ThreadContext context, IRubyObject indent) { this.indent = prepareByteList(context, indent); return indent; } public ByteList getSpace() { return space; } @JRubyMethod(name="space") public RubyString space_get(ThreadContext context) { return context.getRuntime().newString(space); } @JRubyMethod(name="space=") public IRubyObject space_set(ThreadContext context, IRubyObject space) { this.space = 
prepareByteList(context, space); return space; } public ByteList getSpaceBefore() { return spaceBefore; } @JRubyMethod(name="space_before") public RubyString space_before_get(ThreadContext context) { return context.getRuntime().newString(spaceBefore); } @JRubyMethod(name="space_before=") public IRubyObject space_before_set(ThreadContext context, IRubyObject spaceBefore) { this.spaceBefore = prepareByteList(context, spaceBefore); return spaceBefore; } public ByteList getObjectNl() { return objectNl; } @JRubyMethod(name="object_nl") public RubyString object_nl_get(ThreadContext context) { return context.getRuntime().newString(objectNl); } @JRubyMethod(name="object_nl=") public IRubyObject object_nl_set(ThreadContext context, IRubyObject objectNl) { this.objectNl = prepareByteList(context, objectNl); return objectNl; } public ByteList getArrayNl() { return arrayNl; } @JRubyMethod(name="array_nl") public RubyString array_nl_get(ThreadContext context) { return context.getRuntime().newString(arrayNl); } @JRubyMethod(name="array_nl=") public IRubyObject array_nl_set(ThreadContext context, IRubyObject arrayNl) { this.arrayNl = prepareByteList(context, arrayNl); return arrayNl; } @JRubyMethod(name="check_circular?") public RubyBoolean check_circular_p(ThreadContext context) { return context.getRuntime().newBoolean(maxNesting != 0); } /** * Returns the maximum level of nesting configured for this state. 
*/ public int getMaxNesting() { return maxNesting; } @JRubyMethod(name="max_nesting") public RubyInteger max_nesting_get(ThreadContext context) { return context.getRuntime().newFixnum(maxNesting); } @JRubyMethod(name="max_nesting=") public IRubyObject max_nesting_set(IRubyObject max_nesting) { maxNesting = RubyNumeric.fix2int(max_nesting); return max_nesting; } public boolean allowNaN() { return allowNaN; } @JRubyMethod(name="allow_nan?") public RubyBoolean allow_nan_p(ThreadContext context) { return context.getRuntime().newBoolean(allowNaN); } public boolean asciiOnly() { return asciiOnly; } @JRubyMethod(name="ascii_only?") public RubyBoolean ascii_only_p(ThreadContext context) { return context.getRuntime().newBoolean(asciiOnly); } @JRubyMethod(name="buffer_initial_length") public RubyInteger buffer_initial_length_get(ThreadContext context) { return context.getRuntime().newFixnum(bufferInitialLength); } @JRubyMethod(name="buffer_initial_length=") public IRubyObject buffer_initial_length_set(IRubyObject buffer_initial_length) { int newLength = RubyNumeric.fix2int(buffer_initial_length); if (newLength > 0) bufferInitialLength = newLength; return buffer_initial_length; } public int getDepth() { return depth; } @JRubyMethod(name="depth") public RubyInteger depth_get(ThreadContext context) { return context.getRuntime().newFixnum(depth); } @JRubyMethod(name="depth=") public IRubyObject depth_set(IRubyObject vDepth) { depth = RubyNumeric.fix2int(vDepth); return vDepth; } private ByteList prepareByteList(ThreadContext context, IRubyObject value) { RubyString str = value.convertToString(); RuntimeInfo info = RuntimeInfo.forRuntime(context.getRuntime()); if (str.encoding(context) != info.utf8.get()) { str = (RubyString)str.encode(context, info.utf8.get()); } return str.getByteList().dup(); } /** * State#configure(opts) * *

Configures this State instance with the {@link RubyHash Hash} * opts, and returns itself. * @param vOpts The options hash * @return The receiver */ @JRubyMethod(alias = "merge") public IRubyObject configure(ThreadContext context, IRubyObject vOpts) { OptionsReader opts = new OptionsReader(context, vOpts); ByteList indent = opts.getString("indent"); if (indent != null) this.indent = indent; ByteList space = opts.getString("space"); if (space != null) this.space = space; ByteList spaceBefore = opts.getString("space_before"); if (spaceBefore != null) this.spaceBefore = spaceBefore; ByteList arrayNl = opts.getString("array_nl"); if (arrayNl != null) this.arrayNl = arrayNl; ByteList objectNl = opts.getString("object_nl"); if (objectNl != null) this.objectNl = objectNl; maxNesting = opts.getInt("max_nesting", DEFAULT_MAX_NESTING); allowNaN = opts.getBool("allow_nan", DEFAULT_ALLOW_NAN); asciiOnly = opts.getBool("ascii_only", DEFAULT_ASCII_ONLY); bufferInitialLength = opts.getInt("buffer_initial_length", DEFAULT_BUFFER_INITIAL_LENGTH); depth = opts.getInt("depth", 0); return this; } /** * State#to_h() * *

Returns the configuration instance variables as a hash, that can be * passed to the configure method. * @return the hash */ @JRubyMethod(alias = "to_hash") public RubyHash to_h(ThreadContext context) { Ruby runtime = context.getRuntime(); RubyHash result = RubyHash.newHash(runtime); result.op_aset(context, runtime.newSymbol("indent"), indent_get(context)); result.op_aset(context, runtime.newSymbol("space"), space_get(context)); result.op_aset(context, runtime.newSymbol("space_before"), space_before_get(context)); result.op_aset(context, runtime.newSymbol("object_nl"), object_nl_get(context)); result.op_aset(context, runtime.newSymbol("array_nl"), array_nl_get(context)); result.op_aset(context, runtime.newSymbol("allow_nan"), allow_nan_p(context)); result.op_aset(context, runtime.newSymbol("ascii_only"), ascii_only_p(context)); result.op_aset(context, runtime.newSymbol("max_nesting"), max_nesting_get(context)); result.op_aset(context, runtime.newSymbol("depth"), depth_get(context)); result.op_aset(context, runtime.newSymbol("buffer_initial_length"), buffer_initial_length_get(context)); for (String name: getInstanceVariableNameList()) { result.op_aset(context, runtime.newSymbol(name.substring(1)), getInstanceVariables().getInstanceVariable(name)); } return result; } public int increaseDepth() { depth++; checkMaxNesting(); return depth; } public int decreaseDepth() { return --depth; } /** * Checks if the current depth is allowed as per this state's options. * @param context * @param depth The corrent depth */ private void checkMaxNesting() { if (maxNesting != 0 && depth > maxNesting) { depth--; throw Utils.newException(getRuntime().getCurrentContext(), Utils.M_NESTING_ERROR, "nesting of " + depth + " is too deep"); } } } ruby-json-2.1.0+dfsg.orig/java/src/json/ext/StringDecoder.java0000644000175000017500000001206313113111601023544 0ustar boutilboutil/* * This code is copyrighted work by Daniel Luz . 
* * Distributed under the Ruby license: https://www.ruby-lang.org/en/about/license.txt */ package json.ext; import org.jruby.exceptions.RaiseException; import org.jruby.runtime.ThreadContext; import org.jruby.util.ByteList; /** * A decoder that reads a JSON-encoded string from the given sources and * returns its decoded form on a new ByteList. Escaped Unicode characters * are encoded as UTF-8. */ final class StringDecoder extends ByteListTranscoder { /** * Stores the offset of the high surrogate when reading a surrogate pair, * or -1 when not. */ private int surrogatePairStart = -1; // Array used for writing multi-byte characters into the buffer at once private final byte[] aux = new byte[4]; StringDecoder(ThreadContext context) { super(context); } ByteList decode(ByteList src, int start, int end) { ByteList out = new ByteList(end - start); out.setEncoding(src.getEncoding()); init(src, start, end, out); while (hasNext()) { handleChar(readUtf8Char()); } quoteStop(pos); return out; } private void handleChar(int c) { if (c == '\\') { quoteStop(charStart); handleEscapeSequence(); } else { quoteStart(); } } private void handleEscapeSequence() { ensureMin(1); switch (readUtf8Char()) { case 'b': append('\b'); break; case 'f': append('\f'); break; case 'n': append('\n'); break; case 'r': append('\r'); break; case 't': append('\t'); break; case 'u': ensureMin(4); int cp = readHex(); if (Character.isHighSurrogate((char)cp)) { handleLowSurrogate((char)cp); } else if (Character.isLowSurrogate((char)cp)) { // low surrogate with no high surrogate throw invalidUtf8(); } else { writeUtf8Char(cp); } break; default: // '\\', '"', '/'... 
quoteStart(); } } private void handleLowSurrogate(char highSurrogate) { surrogatePairStart = charStart; ensureMin(1); int lowSurrogate = readUtf8Char(); if (lowSurrogate == '\\') { ensureMin(5); if (readUtf8Char() != 'u') throw invalidUtf8(); lowSurrogate = readHex(); } if (Character.isLowSurrogate((char)lowSurrogate)) { writeUtf8Char(Character.toCodePoint(highSurrogate, (char)lowSurrogate)); surrogatePairStart = -1; } else { throw invalidUtf8(); } } private void writeUtf8Char(int codePoint) { if (codePoint < 0x80) { append(codePoint); } else if (codePoint < 0x800) { aux[0] = (byte)(0xc0 | (codePoint >>> 6)); aux[1] = tailByte(codePoint & 0x3f); append(aux, 0, 2); } else if (codePoint < 0x10000) { aux[0] = (byte)(0xe0 | (codePoint >>> 12)); aux[1] = tailByte(codePoint >>> 6); aux[2] = tailByte(codePoint); append(aux, 0, 3); } else { aux[0] = (byte)(0xf0 | codePoint >>> 18); aux[1] = tailByte(codePoint >>> 12); aux[2] = tailByte(codePoint >>> 6); aux[3] = tailByte(codePoint); append(aux, 0, 4); } } private byte tailByte(int value) { return (byte)(0x80 | (value & 0x3f)); } /** * Reads a 4-digit unsigned hexadecimal number from the source. */ private int readHex() { int numberStart = pos; int result = 0; int length = 4; for (int i = 0; i < length; i++) { int digit = readUtf8Char(); int digitValue; if (digit >= '0' && digit <= '9') { digitValue = digit - '0'; } else if (digit >= 'a' && digit <= 'f') { digitValue = 10 + digit - 'a'; } else if (digit >= 'A' && digit <= 'F') { digitValue = 10 + digit - 'A'; } else { throw new NumberFormatException("Invalid base 16 number " + src.subSequence(numberStart, numberStart + length)); } result = result * 16 + digitValue; } return result; } @Override protected RaiseException invalidUtf8() { ByteList message = new ByteList( ByteList.plain("partial character in source, " + "but hit end near ")); int start = surrogatePairStart != -1 ? 
surrogatePairStart : charStart; message.append(src, start, srcEnd - start); return Utils.newException(context, Utils.M_PARSER_ERROR, context.getRuntime().newString(message)); } } ruby-json-2.1.0+dfsg.orig/java/src/json/ext/ParserService.java0000644000175000017500000000230513113111601023563 0ustar boutilboutil/* * This code is copyrighted work by Daniel Luz . * * Distributed under the Ruby license: https://www.ruby-lang.org/en/about/license.txt */ package json.ext; import java.io.IOException; import java.lang.ref.WeakReference; import org.jruby.Ruby; import org.jruby.RubyClass; import org.jruby.RubyModule; import org.jruby.runtime.load.BasicLibraryService; /** * The service invoked by JRuby's {@link org.jruby.runtime.load.LoadService LoadService}. * Defines the JSON::Ext::Parser class. * @author mernen */ public class ParserService implements BasicLibraryService { public boolean basicLoad(Ruby runtime) throws IOException { runtime.getLoadService().require("json/common"); RuntimeInfo info = RuntimeInfo.initRuntime(runtime); info.jsonModule = new WeakReference(runtime.defineModule("JSON")); RubyModule jsonExtModule = info.jsonModule.get().defineModuleUnder("Ext"); RubyClass parserClass = jsonExtModule.defineClassUnder("Parser", runtime.getObject(), Parser.ALLOCATOR); parserClass.defineAnnotatedMethods(Parser.class); return true; } } ruby-json-2.1.0+dfsg.orig/java/src/json/ext/Parser.rl0000644000175000017500000007306413113111601021750 0ustar boutilboutil/* * This code is copyrighted work by Daniel Luz . 
* * Distributed under the Ruby license: https://www.ruby-lang.org/en/about/license.txt */ package json.ext; import org.jruby.Ruby; import org.jruby.RubyArray; import org.jruby.RubyClass; import org.jruby.RubyEncoding; import org.jruby.RubyFloat; import org.jruby.RubyHash; import org.jruby.RubyInteger; import org.jruby.RubyModule; import org.jruby.RubyNumeric; import org.jruby.RubyObject; import org.jruby.RubyString; import org.jruby.anno.JRubyMethod; import org.jruby.exceptions.JumpException; import org.jruby.exceptions.RaiseException; import org.jruby.runtime.Block; import org.jruby.runtime.ObjectAllocator; import org.jruby.runtime.ThreadContext; import org.jruby.runtime.Visibility; import org.jruby.runtime.builtin.IRubyObject; import org.jruby.util.ByteList; import org.jruby.util.ConvertBytes; import static org.jruby.util.ConvertDouble.DoubleConverter; /** * The JSON::Ext::Parser class. * *

This is the JSON parser implemented as a Java class. To use it as the * standard parser, set *

JSON.parser = JSON::Ext::Parser
* This is performed for you when you include "json/ext". * *

This class does not perform the actual parsing, just acts as an interface * to Ruby code. When the {@link #parse()} method is invoked, a * Parser.ParserSession object is instantiated, which handles the process. * * @author mernen */ public class Parser extends RubyObject { private final RuntimeInfo info; private RubyString vSource; private RubyString createId; private boolean createAdditions; private int maxNesting; private boolean allowNaN; private boolean symbolizeNames; private RubyClass objectClass; private RubyClass arrayClass; private RubyClass decimalClass; private RubyHash match_string; private static final int DEFAULT_MAX_NESTING = 100; private static final ByteList JSON_MINUS_INFINITY = new ByteList(ByteList.plain("-Infinity")); // constant names in the JSON module containing those values private static final String CONST_NAN = "NaN"; private static final String CONST_INFINITY = "Infinity"; private static final String CONST_MINUS_INFINITY = "MinusInfinity"; static final ObjectAllocator ALLOCATOR = new ObjectAllocator() { public IRubyObject allocate(Ruby runtime, RubyClass klazz) { return new Parser(runtime, klazz); } }; /** * Multiple-value return for internal parser methods. * *

All the parseStuff methods return instances of * ParserResult when successful, or null when * there's a problem with the input data. */ static final class ParserResult { /** * The result of the successful parsing. Should never be * null. */ IRubyObject result; /** * The point where the parser returned. */ int p; void update(IRubyObject result, int p) { this.result = result; this.p = p; } } public Parser(Ruby runtime, RubyClass metaClass) { super(runtime, metaClass); info = RuntimeInfo.forRuntime(runtime); } /** * Parser.new(source, opts = {}) * *

Creates a new JSON::Ext::Parser instance for the string * source. * It will be configured by the opts Hash. * opts can have the following keys: * *

*
:max_nesting *
The maximum depth of nesting allowed in the parsed data * structures. Disable depth checking with :max_nesting => false|nil|0, * it defaults to 100. * *
:allow_nan *
If set to true, allow NaN, * Infinity and -Infinity in defiance of RFC 4627 * to be parsed by the Parser. This option defaults to false. * *
:symbolize_names *
If set to true, returns symbols for the names (keys) in * a JSON object. Otherwise strings are returned, which is also the default. * *
:create_additions *
If set to false, the Parser doesn't create additions * even if a matching class and create_id was found. This option * defaults to true. * *
:object_class *
Defaults to Hash. * *
:array_class *
Defaults to Array. * *
:decimal_class *
Specifies which class to use instead of the default (Float) when * parsing decimal numbers. This class must accept a single string argument * in its constructor. *
*/ @JRubyMethod(name = "new", required = 1, optional = 1, meta = true) public static IRubyObject newInstance(IRubyObject clazz, IRubyObject[] args, Block block) { Parser parser = (Parser)((RubyClass)clazz).allocate(); parser.callInit(args, block); return parser; } @JRubyMethod(required = 1, optional = 1, visibility = Visibility.PRIVATE) public IRubyObject initialize(ThreadContext context, IRubyObject[] args) { Ruby runtime = context.getRuntime(); if (this.vSource != null) { throw runtime.newTypeError("already initialized instance"); } OptionsReader opts = new OptionsReader(context, args.length > 1 ? args[1] : null); this.maxNesting = opts.getInt("max_nesting", DEFAULT_MAX_NESTING); this.allowNaN = opts.getBool("allow_nan", false); this.symbolizeNames = opts.getBool("symbolize_names", false); this.createId = opts.getString("create_id", getCreateId(context)); this.createAdditions = opts.getBool("create_additions", false); this.objectClass = opts.getClass("object_class", runtime.getHash()); this.arrayClass = opts.getClass("array_class", runtime.getArray()); this.decimalClass = opts.getClass("decimal_class", null); this.match_string = opts.getHash("match_string"); if(symbolizeNames && createAdditions) { throw runtime.newArgumentError( "options :symbolize_names and :create_additions cannot be " + " used in conjunction" ); } this.vSource = args[0].convertToString(); this.vSource = convertEncoding(context, vSource); return this; } /** * Checks the given string's encoding. If a non-UTF-8 encoding is detected, * a converted copy is returned. * Returns the source string if no conversion is needed. 
*/ private RubyString convertEncoding(ThreadContext context, RubyString source) { RubyEncoding encoding = (RubyEncoding)source.encoding(context); if (encoding == info.ascii8bit.get()) { if (source.isFrozen()) { source = (RubyString) source.dup(); } source.force_encoding(context, info.utf8.get()); } else { source = (RubyString) source.encode(context, info.utf8.get()); } return source; } /** * Checks the first four bytes of the given ByteList to infer its encoding, * using the principle demonstrated on section 3 of RFC 4627 (JSON). */ private static String sniffByteList(ByteList bl) { if (bl.length() < 4) return null; if (bl.get(0) == 0 && bl.get(2) == 0) { return bl.get(1) == 0 ? "utf-32be" : "utf-16be"; } if (bl.get(1) == 0 && bl.get(3) == 0) { return bl.get(2) == 0 ? "utf-32le" : "utf-16le"; } return null; } /** * Assumes the given (binary) RubyString to be in the given encoding, then * converts it to UTF-8. */ private RubyString reinterpretEncoding(ThreadContext context, RubyString str, String sniffedEncoding) { RubyEncoding actualEncoding = info.getEncoding(context, sniffedEncoding); RubyEncoding targetEncoding = info.utf8.get(); RubyString dup = (RubyString)str.dup(); dup.force_encoding(context, actualEncoding); return (RubyString)dup.encode_bang(context, targetEncoding); } /** * Parser#parse() * *

Parses the current JSON text source and returns the * complete data structure as a result. */ @JRubyMethod public IRubyObject parse(ThreadContext context) { return new ParserSession(this, context, info).parse(); } /** * Parser#source() * *

Returns a copy of the current source string, that was * used to construct this Parser. */ @JRubyMethod(name = "source") public IRubyObject source_get() { return checkAndGetSource().dup(); } public RubyString checkAndGetSource() { if (vSource != null) { return vSource; } else { throw getRuntime().newTypeError("uninitialized instance"); } } /** * Queries JSON.create_id. Returns null if it is * set to nil or false, and a String if not. */ private RubyString getCreateId(ThreadContext context) { IRubyObject v = info.jsonModule.get().callMethod(context, "create_id"); return v.isTrue() ? v.convertToString() : null; } /** * A string parsing session. * *

Once a ParserSession is instantiated, the source string should not * change until the parsing is complete. The ParserSession object assumes * the source {@link RubyString} is still associated to its original * {@link ByteList}, which in turn must still be bound to the same * byte[] value (and on the same offset). */ // Ragel uses lots of fall-through @SuppressWarnings("fallthrough") private static class ParserSession { private final Parser parser; private final ThreadContext context; private final RuntimeInfo info; private final ByteList byteList; private final ByteList view; private final byte[] data; private final StringDecoder decoder; private int currentNesting = 0; private final DoubleConverter dc; // initialization value for all state variables. // no idea about the origins of this value, ask Flori ;) private static final int EVIL = 0x666; private ParserSession(Parser parser, ThreadContext context, RuntimeInfo info) { this.parser = parser; this.context = context; this.info = info; this.byteList = parser.checkAndGetSource().getByteList(); this.data = byteList.unsafeBytes(); this.view = new ByteList(data, false); this.decoder = new StringDecoder(context); this.dc = new DoubleConverter(); } private RaiseException unexpectedToken(int absStart, int absEnd) { RubyString msg = getRuntime().newString("unexpected token at '") .cat(data, absStart, absEnd - absStart) .cat((byte)'\''); return newException(Utils.M_PARSER_ERROR, msg); } private Ruby getRuntime() { return context.getRuntime(); } %%{ machine JSON_common; cr = '\n'; cr_neg = [^\n]; ws = [ \t\r\n]; c_comment = '/*' ( any* - (any* '*/' any* ) ) '*/'; cpp_comment = '//' cr_neg* cr; comment = c_comment | cpp_comment; ignore = ws | comment; name_separator = ':'; value_separator = ','; Vnull = 'null'; Vfalse = 'false'; Vtrue = 'true'; VNaN = 'NaN'; VInfinity = 'Infinity'; VMinusInfinity = '-Infinity'; begin_value = [nft"\-[{NI] | digit; begin_object = '{'; end_object = '}'; begin_array = '['; end_array = ']'; 
begin_string = '"'; begin_name = begin_string; begin_number = digit | '-'; }%% %%{ machine JSON_value; include JSON_common; write data; action parse_null { result = getRuntime().getNil(); } action parse_false { result = getRuntime().getFalse(); } action parse_true { result = getRuntime().getTrue(); } action parse_nan { if (parser.allowNaN) { result = getConstant(CONST_NAN); } else { throw unexpectedToken(p - 2, pe); } } action parse_infinity { if (parser.allowNaN) { result = getConstant(CONST_INFINITY); } else { throw unexpectedToken(p - 7, pe); } } action parse_number { if (pe > fpc + 8 && absSubSequence(fpc, fpc + 9).equals(JSON_MINUS_INFINITY)) { if (parser.allowNaN) { result = getConstant(CONST_MINUS_INFINITY); fexec p + 10; fhold; fbreak; } else { throw unexpectedToken(p, pe); } } parseFloat(res, fpc, pe); if (res.result != null) { result = res.result; fexec res.p; } parseInteger(res, fpc, pe); if (res.result != null) { result = res.result; fexec res.p; } fhold; fbreak; } action parse_string { parseString(res, fpc, pe); if (res.result == null) { fhold; fbreak; } else { result = res.result; fexec res.p; } } action parse_array { currentNesting++; parseArray(res, fpc, pe); currentNesting--; if (res.result == null) { fhold; fbreak; } else { result = res.result; fexec res.p; } } action parse_object { currentNesting++; parseObject(res, fpc, pe); currentNesting--; if (res.result == null) { fhold; fbreak; } else { result = res.result; fexec res.p; } } action exit { fhold; fbreak; } main := ( Vnull @parse_null | Vfalse @parse_false | Vtrue @parse_true | VNaN @parse_nan | VInfinity @parse_infinity | begin_number >parse_number | begin_string >parse_string | begin_array >parse_array | begin_object >parse_object ) %*exit; }%% void parseValue(ParserResult res, int p, int pe) { int cs = EVIL; IRubyObject result = null; %% write init; %% write exec; if (cs >= JSON_value_first_final && result != null) { res.update(result, p); } else { res.update(null, p); } } %%{ machine 
JSON_integer; write data; action exit { fhold; fbreak; } main := '-'? ( '0' | [1-9][0-9]* ) ( ^[0-9]? @exit ); }%% void parseInteger(ParserResult res, int p, int pe) { int new_p = parseIntegerInternal(p, pe); if (new_p == -1) { res.update(null, p); return; } RubyInteger number = createInteger(p, new_p); res.update(number, new_p + 1); return; } int parseIntegerInternal(int p, int pe) { int cs = EVIL; %% write init; int memo = p; %% write exec; if (cs < JSON_integer_first_final) { return -1; } return p; } RubyInteger createInteger(int p, int new_p) { Ruby runtime = getRuntime(); ByteList num = absSubSequence(p, new_p); return bytesToInum(runtime, num); } RubyInteger bytesToInum(Ruby runtime, ByteList num) { return runtime.is1_9() ? ConvertBytes.byteListToInum19(runtime, num, 10, true) : ConvertBytes.byteListToInum(runtime, num, 10, true); } %%{ machine JSON_float; include JSON_common; write data; action exit { fhold; fbreak; } main := '-'? ( ( ( '0' | [1-9][0-9]* ) '.' [0-9]+ ( [Ee] [+\-]?[0-9]+ )? ) | ( ( '0' | [1-9][0-9]* ) ( [Ee] [+\-]? [0-9]+ ) ) ) ( ^[0-9Ee.\-]? @exit ); }%% void parseFloat(ParserResult res, int p, int pe) { int new_p = parseFloatInternal(p, pe); if (new_p == -1) { res.update(null, p); return; } IRubyObject number = parser.decimalClass == null ? 
createFloat(p, new_p) : createCustomDecimal(p, new_p); res.update(number, new_p + 1); return; } int parseFloatInternal(int p, int pe) { int cs = EVIL; %% write init; int memo = p; %% write exec; if (cs < JSON_float_first_final) { return -1; } return p; } RubyFloat createFloat(int p, int new_p) { Ruby runtime = getRuntime(); ByteList num = absSubSequence(p, new_p); return RubyFloat.newFloat(runtime, dc.parse(num, true, runtime.is1_9())); } IRubyObject createCustomDecimal(int p, int new_p) { Ruby runtime = getRuntime(); ByteList num = absSubSequence(p, new_p); IRubyObject numString = runtime.newString(num.toString()); return parser.decimalClass.callMethod(context, "new", numString); } %%{ machine JSON_string; include JSON_common; write data; action parse_string { int offset = byteList.begin(); ByteList decoded = decoder.decode(byteList, memo + 1 - offset, p - offset); result = getRuntime().newString(decoded); if (result == null) { fhold; fbreak; } else { fexec p + 1; } } action exit { fhold; fbreak; } main := '"' ( ( ^(["\\]|0..0x1f) | '\\'["\\/bfnrt] | '\\u'[0-9a-fA-F]{4} | '\\'^(["\\/bfnrtu]|0..0x1f) )* %parse_string ) '"' @exit; }%% void parseString(ParserResult res, int p, int pe) { int cs = EVIL; IRubyObject result = null; %% write init; int memo = p; %% write exec; if (parser.createAdditions) { RubyHash matchString = parser.matchString; if (matchString != null) { final IRubyObject[] memoArray = { result, null }; try { matchString.visitAll(new RubyHash.Visitor() { @Override public void visit(IRubyObject pattern, IRubyObject klass) { if (pattern.callMethod(context, "===", memoArray[0]).isTrue()) { memoArray[1] = klass; throw JumpException.SPECIAL_JUMP; } } }); } catch (JumpException e) { } if (memoArray[1] != null) { RubyClass klass = (RubyClass) memoArray[1]; if (klass.respondsTo("json_creatable?") && klass.callMethod(context, "json_creatable?").isTrue()) { result = klass.callMethod(context, "json_create", result); } } } } if (cs >= JSON_string_first_final && 
result != null) { if (result instanceof RubyString) { ((RubyString)result).force_encoding(context, info.utf8.get()); } res.update(result, p + 1); } else { res.update(null, p + 1); } } %%{ machine JSON_array; include JSON_common; write data; action parse_value { parseValue(res, fpc, pe); if (res.result == null) { fhold; fbreak; } else { if (parser.arrayClass == getRuntime().getArray()) { ((RubyArray)result).append(res.result); } else { result.callMethod(context, "<<", res.result); } fexec res.p; } } action exit { fhold; fbreak; } next_element = value_separator ignore* begin_value >parse_value; main := begin_array ignore* ( ( begin_value >parse_value ignore* ) ( ignore* next_element ignore* )* )? ignore* end_array @exit; }%% void parseArray(ParserResult res, int p, int pe) { int cs = EVIL; if (parser.maxNesting > 0 && currentNesting > parser.maxNesting) { throw newException(Utils.M_NESTING_ERROR, "nesting of " + currentNesting + " is too deep"); } IRubyObject result; if (parser.arrayClass == getRuntime().getArray()) { result = RubyArray.newArray(getRuntime()); } else { result = parser.arrayClass.newInstance(context, IRubyObject.NULL_ARRAY, Block.NULL_BLOCK); } %% write init; %% write exec; if (cs >= JSON_array_first_final) { res.update(result, p + 1); } else { throw unexpectedToken(p, pe); } } %%{ machine JSON_object; include JSON_common; write data; action parse_value { parseValue(res, fpc, pe); if (res.result == null) { fhold; fbreak; } else { if (parser.objectClass == getRuntime().getHash()) { ((RubyHash)result).op_aset(context, lastName, res.result); } else { result.callMethod(context, "[]=", new IRubyObject[] { lastName, res.result }); } fexec res.p; } } action parse_name { parseString(res, fpc, pe); if (res.result == null) { fhold; fbreak; } else { RubyString name = (RubyString)res.result; if (parser.symbolizeNames) { lastName = context.getRuntime().is1_9() ? 
name.intern19() : name.intern(); } else { lastName = name; } fexec res.p; } } action exit { fhold; fbreak; } pair = ignore* begin_name >parse_name ignore* name_separator ignore* begin_value >parse_value; next_pair = ignore* value_separator pair; main := ( begin_object (pair (next_pair)*)? ignore* end_object ) @exit; }%% void parseObject(ParserResult res, int p, int pe) { int cs = EVIL; IRubyObject lastName = null; boolean objectDefault = true; if (parser.maxNesting > 0 && currentNesting > parser.maxNesting) { throw newException(Utils.M_NESTING_ERROR, "nesting of " + currentNesting + " is too deep"); } // this is guaranteed to be a RubyHash due to the earlier // allocator test at OptionsReader#getClass IRubyObject result; if (parser.objectClass == getRuntime().getHash()) { result = RubyHash.newHash(getRuntime()); } else { objectDefault = false; result = parser.objectClass.newInstance(context, IRubyObject.NULL_ARRAY, Block.NULL_BLOCK); } %% write init; %% write exec; if (cs < JSON_object_first_final) { res.update(null, p + 1); return; } IRubyObject returnedResult = result; // attempt to de-serialize object if (parser.createAdditions) { IRubyObject vKlassName; if (objectDefault) { vKlassName = ((RubyHash)result).op_aref(context, parser.createId); } else { vKlassName = result.callMethod(context, "[]", parser.createId); } if (!vKlassName.isNil()) { // might throw ArgumentError, we let it propagate IRubyObject klass = parser.info.jsonModule.get(). 
callMethod(context, "deep_const_get", vKlassName); if (klass.respondsTo("json_creatable?") && klass.callMethod(context, "json_creatable?").isTrue()) { returnedResult = klass.callMethod(context, "json_create", result); } } } res.update(returnedResult, p + 1); } %%{ machine JSON; include JSON_common; write data; action parse_value { parseValue(res, fpc, pe); if (res.result == null) { fhold; fbreak; } else { result = res.result; fexec res.p; } } main := ignore* ( begin_value >parse_value) ignore*; }%% public IRubyObject parseImplemetation() { int cs = EVIL; int p, pe; IRubyObject result = null; ParserResult res = new ParserResult(); %% write init; p = byteList.begin(); pe = p + byteList.length(); %% write exec; if (cs >= JSON_first_final && p == pe) { return result; } else { throw unexpectedToken(p, pe); } } public IRubyObject parse() { return parseImplemetation(); } /** * Updates the "view" bytelist with the new offsets and returns it. * @param start * @param end */ private ByteList absSubSequence(int absStart, int absEnd) { view.setBegin(absStart); view.setRealSize(absEnd - absStart); return view; } /** * Retrieves a constant directly descended from the JSON module. * @param name The constant name */ private IRubyObject getConstant(String name) { return parser.info.jsonModule.get().getConstant(name); } private RaiseException newException(String className, String message) { return Utils.newException(context, className, message); } private RaiseException newException(String className, RubyString message) { return Utils.newException(context, className, message); } private RaiseException newException(String className, String messageBegin, ByteList messageEnd) { return newException(className, getRuntime().newString(messageBegin).cat(messageEnd)); } } } ruby-json-2.1.0+dfsg.orig/java/src/json/ext/StringEncoder.java0000644000175000017500000000666013113111601023564 0ustar boutilboutil/* * This code is copyrighted work by Daniel Luz . 
* * Distributed under the Ruby license: https://www.ruby-lang.org/en/about/license.txt */ package json.ext; import org.jruby.exceptions.RaiseException; import org.jruby.runtime.ThreadContext; import org.jruby.util.ByteList; /** * An encoder that reads from the given source and outputs its representation * to another ByteList. The source string is fully checked for UTF-8 validity, * and throws a GeneratorError if any problem is found. */ final class StringEncoder extends ByteListTranscoder { private final boolean asciiOnly; // Escaped characters will reuse this array, to avoid new allocations // or appending them byte-by-byte private final byte[] aux = new byte[] {/* First unicode character */ '\\', 'u', 0, 0, 0, 0, /* Second unicode character (for surrogate pairs) */ '\\', 'u', 0, 0, 0, 0, /* "\X" characters */ '\\', 0}; // offsets on the array above private static final int ESCAPE_UNI1_OFFSET = 0; private static final int ESCAPE_UNI2_OFFSET = ESCAPE_UNI1_OFFSET + 6; private static final int ESCAPE_CHAR_OFFSET = ESCAPE_UNI2_OFFSET + 6; /** Array used for code point decomposition in surrogates */ private final char[] utf16 = new char[2]; private static final byte[] HEX = new byte[] {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'}; StringEncoder(ThreadContext context, boolean asciiOnly) { super(context); this.asciiOnly = asciiOnly; } void encode(ByteList src, ByteList out) { init(src, out); append('"'); while (hasNext()) { handleChar(readUtf8Char()); } quoteStop(pos); append('"'); } private void handleChar(int c) { switch (c) { case '"': case '\\': escapeChar((char)c); break; case '\n': escapeChar('n'); break; case '\r': escapeChar('r'); break; case '\t': escapeChar('t'); break; case '\f': escapeChar('f'); break; case '\b': escapeChar('b'); break; default: if (c >= 0x20 && c <= 0x7f || (c >= 0x80 && !asciiOnly)) { quoteStart(); } else { quoteStop(charStart); escapeUtf8Char(c); } } } private void escapeChar(char c) { 
quoteStop(charStart); aux[ESCAPE_CHAR_OFFSET + 1] = (byte)c; append(aux, ESCAPE_CHAR_OFFSET, 2); } private void escapeUtf8Char(int codePoint) { int numChars = Character.toChars(codePoint, utf16, 0); escapeCodeUnit(utf16[0], ESCAPE_UNI1_OFFSET + 2); if (numChars > 1) escapeCodeUnit(utf16[1], ESCAPE_UNI2_OFFSET + 2); append(aux, ESCAPE_UNI1_OFFSET, 6 * numChars); } private void escapeCodeUnit(char c, int auxOffset) { for (int i = 0; i < 4; i++) { aux[auxOffset + i] = HEX[(c >>> (12 - 4 * i)) & 0xf]; } } @Override protected RaiseException invalidUtf8() { return Utils.newException(context, Utils.M_GENERATOR_ERROR, "source sequence is illegal/malformed utf-8"); } } ruby-json-2.1.0+dfsg.orig/java/src/json/ext/Generator.java0000644000175000017500000004126413113111601022743 0ustar boutilboutil/* * This code is copyrighted work by Daniel Luz . * * Distributed under the Ruby license: https://www.ruby-lang.org/en/about/license.txt */ package json.ext; import org.jruby.Ruby; import org.jruby.RubyArray; import org.jruby.RubyBignum; import org.jruby.RubyBoolean; import org.jruby.RubyClass; import org.jruby.RubyFixnum; import org.jruby.RubyFloat; import org.jruby.RubyHash; import org.jruby.RubyNumeric; import org.jruby.RubyString; import org.jruby.runtime.ThreadContext; import org.jruby.runtime.builtin.IRubyObject; import org.jruby.util.ByteList; public final class Generator { private Generator() { throw new RuntimeException(); } /** * Encodes the given object as a JSON string, using the given handler. */ static RubyString generateJson(ThreadContext context, T object, Handler handler, IRubyObject[] args) { Session session = new Session(context, args.length > 0 ? args[0] : null); return session.infect(handler.generateNew(session, object)); } /** * Encodes the given object as a JSON string, detecting the appropriate handler * for the given object. 
*/ static RubyString generateJson(ThreadContext context, T object, IRubyObject[] args) { Handler handler = getHandlerFor(context.getRuntime(), object); return generateJson(context, object, handler, args); } /** * Encodes the given object as a JSON string, using the appropriate * handler if one is found or calling #to_json if not. */ public static RubyString generateJson(ThreadContext context, T object, GeneratorState config) { Session session = new Session(context, config); Handler handler = getHandlerFor(context.getRuntime(), object); return handler.generateNew(session, object); } /** * Returns the best serialization handler for the given object. */ // Java's generics can't handle this satisfactorily, so I'll just leave // the best I could get and ignore the warnings @SuppressWarnings("unchecked") private static Handler getHandlerFor(Ruby runtime, T object) { RubyClass metaClass = object.getMetaClass(); if (metaClass == runtime.getString()) return (Handler)STRING_HANDLER; if (metaClass == runtime.getFixnum()) return (Handler)FIXNUM_HANDLER; if (metaClass == runtime.getHash()) return (Handler)HASH_HANDLER; if (metaClass == runtime.getArray()) return (Handler)ARRAY_HANDLER; if (object.isNil()) return (Handler)NIL_HANDLER; if (object == runtime.getTrue()) return (Handler)TRUE_HANDLER; if (object == runtime.getFalse()) return (Handler)FALSE_HANDLER; if (metaClass == runtime.getFloat()) return (Handler)FLOAT_HANDLER; if (metaClass == runtime.getBignum()) return (Handler)BIGNUM_HANDLER; return GENERIC_HANDLER; } /* Generator context */ /** * A class that concentrates all the information that is shared by * generators working on a single session. * *

A session is defined as the process of serializing a single root * object; any handler directly called by container handlers (arrays and * hashes/objects) shares this object with its caller. * *

Note that anything called indirectly (via {@link GENERIC_HANDLER}) * won't be part of the session. */ static class Session { private final ThreadContext context; private GeneratorState state; private IRubyObject possibleState; private RuntimeInfo info; private StringEncoder stringEncoder; private boolean tainted = false; private boolean untrusted = false; Session(ThreadContext context, GeneratorState state) { this.context = context; this.state = state; } Session(ThreadContext context, IRubyObject possibleState) { this.context = context; this.possibleState = possibleState == null || possibleState.isNil() ? null : possibleState; } public ThreadContext getContext() { return context; } public Ruby getRuntime() { return context.getRuntime(); } public GeneratorState getState() { if (state == null) { state = GeneratorState.fromState(context, getInfo(), possibleState); } return state; } public RuntimeInfo getInfo() { if (info == null) info = RuntimeInfo.forRuntime(getRuntime()); return info; } public StringEncoder getStringEncoder() { if (stringEncoder == null) { stringEncoder = new StringEncoder(context, getState().asciiOnly()); } return stringEncoder; } public void infectBy(IRubyObject object) { if (object.isTaint()) tainted = true; if (object.isUntrusted()) untrusted = true; } public T infect(T object) { if (tainted) object.setTaint(true); if (untrusted) object.setUntrusted(true); return object; } } /* Handler base classes */ private static abstract class Handler { /** * Returns an estimative of how much space the serialization of the * given object will take. Used for allocating enough buffer space * before invoking other methods. 
*/ int guessSize(Session session, T object) { return 4; } RubyString generateNew(Session session, T object) { RubyString result; ByteList buffer = new ByteList(guessSize(session, object)); generate(session, object, buffer); result = RubyString.newString(session.getRuntime(), buffer); ThreadContext context = session.getContext(); RuntimeInfo info = session.getInfo(); result.force_encoding(context, info.utf8.get()); return result; } abstract void generate(Session session, T object, ByteList buffer); } /** * A handler that returns a fixed keyword regardless of the passed object. */ private static class KeywordHandler extends Handler { private final ByteList keyword; private KeywordHandler(String keyword) { this.keyword = new ByteList(ByteList.plain(keyword), false); } @Override int guessSize(Session session, T object) { return keyword.length(); } @Override RubyString generateNew(Session session, T object) { return RubyString.newStringShared(session.getRuntime(), keyword); } @Override void generate(Session session, T object, ByteList buffer) { buffer.append(keyword); } } /* Handlers */ static final Handler BIGNUM_HANDLER = new Handler() { @Override void generate(Session session, RubyBignum object, ByteList buffer) { // JRUBY-4751: RubyBignum.to_s() returns generic object // representation (fixed in 1.5, but we maintain backwards // compatibility; call to_s(IRubyObject[]) then buffer.append(((RubyString)object.to_s(IRubyObject.NULL_ARRAY)).getByteList()); } }; static final Handler FIXNUM_HANDLER = new Handler() { @Override void generate(Session session, RubyFixnum object, ByteList buffer) { buffer.append(object.to_s().getByteList()); } }; static final Handler FLOAT_HANDLER = new Handler() { @Override void generate(Session session, RubyFloat object, ByteList buffer) { double value = RubyFloat.num2dbl(object); if (Double.isInfinite(value) || Double.isNaN(value)) { if (!session.getState().allowNaN()) { throw Utils.newException(session.getContext(), Utils.M_GENERATOR_ERROR, 
object + " not allowed in JSON"); } } buffer.append(((RubyString)object.to_s()).getByteList()); } }; static final Handler ARRAY_HANDLER = new Handler() { @Override int guessSize(Session session, RubyArray object) { GeneratorState state = session.getState(); int depth = state.getDepth(); int perItem = 4 // prealloc + (depth + 1) * state.getIndent().length() // indent + 1 + state.getArrayNl().length(); // ',' arrayNl return 2 + object.size() * perItem; } @Override void generate(Session session, RubyArray object, ByteList buffer) { ThreadContext context = session.getContext(); Ruby runtime = context.getRuntime(); GeneratorState state = session.getState(); int depth = state.increaseDepth(); ByteList indentUnit = state.getIndent(); byte[] shift = Utils.repeat(indentUnit, depth); ByteList arrayNl = state.getArrayNl(); byte[] delim = new byte[1 + arrayNl.length()]; delim[0] = ','; System.arraycopy(arrayNl.unsafeBytes(), arrayNl.begin(), delim, 1, arrayNl.length()); session.infectBy(object); buffer.append((byte)'['); buffer.append(arrayNl); boolean firstItem = true; for (int i = 0, t = object.getLength(); i < t; i++) { IRubyObject element = object.eltInternal(i); session.infectBy(element); if (firstItem) { firstItem = false; } else { buffer.append(delim); } buffer.append(shift); Handler handler = getHandlerFor(runtime, element); handler.generate(session, element, buffer); } state.decreaseDepth(); if (arrayNl.length() != 0) { buffer.append(arrayNl); buffer.append(shift, 0, state.getDepth() * indentUnit.length()); } buffer.append((byte)']'); } }; static final Handler HASH_HANDLER = new Handler() { @Override int guessSize(Session session, RubyHash object) { GeneratorState state = session.getState(); int perItem = 12 // key, colon, comma + (state.getDepth() + 1) * state.getIndent().length() + state.getSpaceBefore().length() + state.getSpace().length(); return 2 + object.size() * perItem; } @Override void generate(final Session session, RubyHash object, final ByteList buffer) { 
ThreadContext context = session.getContext(); final Ruby runtime = context.getRuntime(); final GeneratorState state = session.getState(); final int depth = state.increaseDepth(); final ByteList objectNl = state.getObjectNl(); final byte[] indent = Utils.repeat(state.getIndent(), depth); final ByteList spaceBefore = state.getSpaceBefore(); final ByteList space = state.getSpace(); buffer.append((byte)'{'); buffer.append(objectNl); object.visitAll(new RubyHash.Visitor() { private boolean firstPair = true; @Override public void visit(IRubyObject key, IRubyObject value) { if (firstPair) { firstPair = false; } else { buffer.append((byte)','); buffer.append(objectNl); } if (objectNl.length() != 0) buffer.append(indent); STRING_HANDLER.generate(session, key.asString(), buffer); session.infectBy(key); buffer.append(spaceBefore); buffer.append((byte)':'); buffer.append(space); Handler valueHandler = getHandlerFor(runtime, value); valueHandler.generate(session, value, buffer); session.infectBy(value); } }); state.decreaseDepth(); if (objectNl.length() != 0) { buffer.append(objectNl); buffer.append(Utils.repeat(state.getIndent(), state.getDepth())); } buffer.append((byte)'}'); } }; static final Handler STRING_HANDLER = new Handler() { @Override int guessSize(Session session, RubyString object) { // for most applications, most strings will be just a set of // printable ASCII characters without any escaping, so let's // just allocate enough space for that + the quotes return 2 + object.getByteList().length(); } @Override void generate(Session session, RubyString object, ByteList buffer) { RuntimeInfo info = session.getInfo(); RubyString src; if (object.encoding(session.getContext()) != info.utf8.get()) { src = (RubyString)object.encode(session.getContext(), info.utf8.get()); } else { src = object; } session.getStringEncoder().encode(src.getByteList(), buffer); } }; static final Handler TRUE_HANDLER = new KeywordHandler("true"); static final Handler FALSE_HANDLER = new 
KeywordHandler("false"); static final Handler NIL_HANDLER = new KeywordHandler("null"); /** * The default handler (Object#to_json): coerces the object * to string using #to_s, and serializes that string. */ static final Handler OBJECT_HANDLER = new Handler() { @Override RubyString generateNew(Session session, IRubyObject object) { RubyString str = object.asString(); return STRING_HANDLER.generateNew(session, str); } @Override void generate(Session session, IRubyObject object, ByteList buffer) { RubyString str = object.asString(); STRING_HANDLER.generate(session, str, buffer); } }; /** * A handler that simply calls #to_json(state) on the * given object. */ static final Handler GENERIC_HANDLER = new Handler() { @Override RubyString generateNew(Session session, IRubyObject object) { if (object.respondsTo("to_json")) { IRubyObject result = object.callMethod(session.getContext(), "to_json", new IRubyObject[] {session.getState()}); if (result instanceof RubyString) return (RubyString)result; throw session.getRuntime().newTypeError("to_json must return a String"); } else { return OBJECT_HANDLER.generateNew(session, object); } } @Override void generate(Session session, IRubyObject object, ByteList buffer) { RubyString result = generateNew(session, object); buffer.append(result.getByteList()); } }; } ruby-json-2.1.0+dfsg.orig/java/src/json/ext/GeneratorMethods.java0000644000175000017500000002143313113111601024263 0ustar boutilboutil/* * This code is copyrighted work by Daniel Luz . 
* * Distributed under the Ruby license: https://www.ruby-lang.org/en/about/license.txt */ package json.ext; import java.lang.ref.WeakReference; import org.jruby.Ruby; import org.jruby.RubyArray; import org.jruby.RubyBoolean; import org.jruby.RubyFixnum; import org.jruby.RubyFloat; import org.jruby.RubyHash; import org.jruby.RubyInteger; import org.jruby.RubyModule; import org.jruby.RubyNumeric; import org.jruby.RubyString; import org.jruby.anno.JRubyMethod; import org.jruby.runtime.ThreadContext; import org.jruby.runtime.builtin.IRubyObject; import org.jruby.util.ByteList; /** * A class that populates the * Json::Ext::Generator::GeneratorMethods module. * * @author mernen */ class GeneratorMethods { /** * Populates the given module with all modules and their methods * @param info * @param generatorMethodsModule The module to populate * (normally JSON::Generator::GeneratorMethods) */ static void populate(RuntimeInfo info, RubyModule module) { defineMethods(module, "Array", RbArray.class); defineMethods(module, "FalseClass", RbFalse.class); defineMethods(module, "Float", RbFloat.class); defineMethods(module, "Hash", RbHash.class); defineMethods(module, "Integer", RbInteger.class); defineMethods(module, "NilClass", RbNil.class); defineMethods(module, "Object", RbObject.class); defineMethods(module, "String", RbString.class); defineMethods(module, "TrueClass", RbTrue.class); info.stringExtendModule = new WeakReference(module.defineModuleUnder("String") .defineModuleUnder("Extend")); info.stringExtendModule.get().defineAnnotatedMethods(StringExtend.class); } /** * Convenience method for defining methods on a submodule. 
* @param parentModule * @param submoduleName * @param klass */ private static void defineMethods(RubyModule parentModule, String submoduleName, Class klass) { RubyModule submodule = parentModule.defineModuleUnder(submoduleName); submodule.defineAnnotatedMethods(klass); } public static class RbHash { @JRubyMethod(rest=true) public static IRubyObject to_json(ThreadContext context, IRubyObject vSelf, IRubyObject[] args) { return Generator.generateJson(context, (RubyHash)vSelf, Generator.HASH_HANDLER, args); } } public static class RbArray { @JRubyMethod(rest=true) public static IRubyObject to_json(ThreadContext context, IRubyObject vSelf, IRubyObject[] args) { return Generator.generateJson(context, (RubyArray)vSelf, Generator.ARRAY_HANDLER, args); } } public static class RbInteger { @JRubyMethod(rest=true) public static IRubyObject to_json(ThreadContext context, IRubyObject vSelf, IRubyObject[] args) { return Generator.generateJson(context, vSelf, args); } } public static class RbFloat { @JRubyMethod(rest=true) public static IRubyObject to_json(ThreadContext context, IRubyObject vSelf, IRubyObject[] args) { return Generator.generateJson(context, (RubyFloat)vSelf, Generator.FLOAT_HANDLER, args); } } public static class RbString { @JRubyMethod(rest=true) public static IRubyObject to_json(ThreadContext context, IRubyObject vSelf, IRubyObject[] args) { return Generator.generateJson(context, (RubyString)vSelf, Generator.STRING_HANDLER, args); } /** * {@link RubyString String}#to_json_raw(*) * *

This method creates a JSON text from the result of a call to * {@link #to_json_raw_object} of this String. */ @JRubyMethod(rest=true) public static IRubyObject to_json_raw(ThreadContext context, IRubyObject vSelf, IRubyObject[] args) { RubyHash obj = toJsonRawObject(context, Utils.ensureString(vSelf)); return Generator.generateJson(context, obj, Generator.HASH_HANDLER, args); } /** * {@link RubyString String}#to_json_raw_object(*) * *

This method creates a raw object Hash, that can be nested into * other data structures and will be unparsed as a raw string. This * method should be used if you want to convert raw strings to JSON * instead of UTF-8 strings, e.g. binary data. */ @JRubyMethod(rest=true) public static IRubyObject to_json_raw_object(ThreadContext context, IRubyObject vSelf, IRubyObject[] args) { return toJsonRawObject(context, Utils.ensureString(vSelf)); } private static RubyHash toJsonRawObject(ThreadContext context, RubyString self) { Ruby runtime = context.getRuntime(); RubyHash result = RubyHash.newHash(runtime); IRubyObject createId = RuntimeInfo.forRuntime(runtime) .jsonModule.get().callMethod(context, "create_id"); result.op_aset(context, createId, self.getMetaClass().to_s()); ByteList bl = self.getByteList(); byte[] uBytes = bl.unsafeBytes(); RubyArray array = runtime.newArray(bl.length()); for (int i = bl.begin(), t = bl.begin() + bl.length(); i < t; i++) { array.store(i, runtime.newFixnum(uBytes[i] & 0xff)); } result.op_aset(context, runtime.newString("raw"), array); return result; } @JRubyMethod(required=1, module=true) public static IRubyObject included(ThreadContext context, IRubyObject vSelf, IRubyObject module) { RuntimeInfo info = RuntimeInfo.forRuntime(context.getRuntime()); return module.callMethod(context, "extend", info.stringExtendModule.get()); } } public static class StringExtend { /** * {@link RubyString String}#json_create(o) * *

Raw Strings are JSON Objects (the raw bytes are stored in an * array for the key "raw"). The Ruby String can be created by this * module method. */ @JRubyMethod(required=1) public static IRubyObject json_create(ThreadContext context, IRubyObject vSelf, IRubyObject vHash) { Ruby runtime = context.getRuntime(); RubyHash o = vHash.convertToHash(); IRubyObject rawData = o.fastARef(runtime.newString("raw")); if (rawData == null) { throw runtime.newArgumentError("\"raw\" value not defined " + "for encoded String"); } RubyArray ary = Utils.ensureArray(rawData); byte[] bytes = new byte[ary.getLength()]; for (int i = 0, t = ary.getLength(); i < t; i++) { IRubyObject element = ary.eltInternal(i); if (element instanceof RubyFixnum) { bytes[i] = (byte)RubyNumeric.fix2long(element); } else { throw runtime.newTypeError(element, runtime.getFixnum()); } } return runtime.newString(new ByteList(bytes, false)); } } public static class RbTrue { @JRubyMethod(rest=true) public static IRubyObject to_json(ThreadContext context, IRubyObject vSelf, IRubyObject[] args) { return Generator.generateJson(context, (RubyBoolean)vSelf, Generator.TRUE_HANDLER, args); } } public static class RbFalse { @JRubyMethod(rest=true) public static IRubyObject to_json(ThreadContext context, IRubyObject vSelf, IRubyObject[] args) { return Generator.generateJson(context, (RubyBoolean)vSelf, Generator.FALSE_HANDLER, args); } } public static class RbNil { @JRubyMethod(rest=true) public static IRubyObject to_json(ThreadContext context, IRubyObject vSelf, IRubyObject[] args) { return Generator.generateJson(context, vSelf, Generator.NIL_HANDLER, args); } } public static class RbObject { @JRubyMethod(rest=true) public static IRubyObject to_json(ThreadContext context, IRubyObject self, IRubyObject[] args) { return RbString.to_json(context, self.asString(), args); } } } ruby-json-2.1.0+dfsg.orig/java/src/json/ext/RuntimeInfo.java0000644000175000017500000001074713113111601023256 0ustar boutilboutil/* * This code is 
copyrighted work by Daniel Luz . * * Distributed under the Ruby license: https://www.ruby-lang.org/en/about/license.txt */ package json.ext; import java.lang.ref.WeakReference; import java.util.HashMap; import java.util.Map; import java.util.WeakHashMap; import org.jruby.Ruby; import org.jruby.RubyClass; import org.jruby.RubyEncoding; import org.jruby.RubyModule; import org.jruby.runtime.ThreadContext; import org.jruby.runtime.builtin.IRubyObject; final class RuntimeInfo { // since the vast majority of cases runs just one runtime, // we optimize for that private static WeakReference runtime1 = new WeakReference(null); private static RuntimeInfo info1; // store remaining runtimes here (does not include runtime1) private static Map runtimes; // these fields are filled by the service loaders // Use WeakReferences so that RuntimeInfo doesn't indirectly hold a hard reference to // the Ruby runtime object, which would cause memory leaks in the runtimes map above. /** JSON */ WeakReference jsonModule; /** JSON::Ext::Generator::GeneratorMethods::String::Extend */ WeakReference stringExtendModule; /** JSON::Ext::Generator::State */ WeakReference generatorStateClass; /** JSON::SAFE_STATE_PROTOTYPE */ WeakReference safeStatePrototype; final WeakReference utf8; final WeakReference ascii8bit; // other encodings private final Map> encodings; private RuntimeInfo(Ruby runtime) { RubyClass encodingClass = runtime.getEncoding(); if (encodingClass == null) { // 1.8 mode utf8 = ascii8bit = null; encodings = null; } else { ThreadContext context = runtime.getCurrentContext(); utf8 = new WeakReference((RubyEncoding)RubyEncoding.find(context, encodingClass, runtime.newString("utf-8"))); ascii8bit = new WeakReference((RubyEncoding)RubyEncoding.find(context, encodingClass, runtime.newString("ascii-8bit"))); encodings = new HashMap>(); } } static RuntimeInfo initRuntime(Ruby runtime) { synchronized (RuntimeInfo.class) { if (runtime1.get() == runtime) { return info1; } else if (runtime1.get() 
== null) { runtime1 = new WeakReference(runtime); info1 = new RuntimeInfo(runtime); return info1; } else { if (runtimes == null) { runtimes = new WeakHashMap(1); } RuntimeInfo cache = runtimes.get(runtime); if (cache == null) { cache = new RuntimeInfo(runtime); runtimes.put(runtime, cache); } return cache; } } } public static RuntimeInfo forRuntime(Ruby runtime) { synchronized (RuntimeInfo.class) { if (runtime1.get() == runtime) return info1; RuntimeInfo cache = null; if (runtimes != null) cache = runtimes.get(runtime); assert cache != null : "Runtime given has not initialized JSON::Ext"; return cache; } } public RubyEncoding getEncoding(ThreadContext context, String name) { synchronized (encodings) { WeakReference encoding = encodings.get(name); if (encoding == null) { Ruby runtime = context.getRuntime(); encoding = new WeakReference((RubyEncoding)RubyEncoding.find(context, runtime.getEncoding(), runtime.newString(name))); encodings.put(name, encoding); } return encoding.get(); } } public GeneratorState getSafeStatePrototype(ThreadContext context) { if (safeStatePrototype == null) { IRubyObject value = jsonModule.get().getConstant("SAFE_STATE_PROTOTYPE"); if (!(value instanceof GeneratorState)) { throw context.getRuntime().newTypeError(value, generatorStateClass.get()); } safeStatePrototype = new WeakReference((GeneratorState)value); } return safeStatePrototype.get(); } } ruby-json-2.1.0+dfsg.orig/java/src/json/ext/ByteListTranscoder.java0000644000175000017500000001134613113111601024577 0ustar boutilboutil/* * This code is copyrighted work by Daniel Luz . * * Distributed under the Ruby license: https://www.ruby-lang.org/en/about/license.txt */ package json.ext; import org.jruby.exceptions.RaiseException; import org.jruby.runtime.ThreadContext; import org.jruby.util.ByteList; /** * A class specialized in transcoding a certain String format into another, * using UTF-8 ByteLists as both input and output. 
*/ abstract class ByteListTranscoder { protected final ThreadContext context; protected ByteList src; protected int srcEnd; /** Position where the last read character started */ protected int charStart; /** Position of the next character to read */ protected int pos; private ByteList out; /** * When a character that can be copied straight into the output is found, * its index is stored on this variable, and copying is delayed until * the sequence of characters that can be copied ends. * *

The variable stores -1 when not in a plain sequence. */ private int quoteStart = -1; protected ByteListTranscoder(ThreadContext context) { this.context = context; } protected void init(ByteList src, ByteList out) { this.init(src, 0, src.length(), out); } protected void init(ByteList src, int start, int end, ByteList out) { this.src = src; this.pos = start; this.charStart = start; this.srcEnd = end; this.out = out; } /** * Returns whether there are any characters left to be read. */ protected boolean hasNext() { return pos < srcEnd; } /** * Returns the next character in the buffer. */ private char next() { return src.charAt(pos++); } /** * Reads an UTF-8 character from the input and returns its code point, * while advancing the input position. * *

Raises an {@link #invalidUtf8()} exception if an invalid byte * is found. */ protected int readUtf8Char() { charStart = pos; char head = next(); if (head <= 0x7f) { // 0b0xxxxxxx (ASCII) return head; } if (head <= 0xbf) { // 0b10xxxxxx throw invalidUtf8(); // tail byte with no head } if (head <= 0xdf) { // 0b110xxxxx ensureMin(1); int cp = ((head & 0x1f) << 6) | nextPart(); if (cp < 0x0080) throw invalidUtf8(); return cp; } if (head <= 0xef) { // 0b1110xxxx ensureMin(2); int cp = ((head & 0x0f) << 12) | (nextPart() << 6) | nextPart(); if (cp < 0x0800) throw invalidUtf8(); return cp; } if (head <= 0xf7) { // 0b11110xxx ensureMin(3); int cp = ((head & 0x07) << 18) | (nextPart() << 12) | (nextPart() << 6) | nextPart(); if (!Character.isValidCodePoint(cp)) throw invalidUtf8(); return cp; } // 0b11111xxx? throw invalidUtf8(); } /** * Throws a GeneratorError if the input list doesn't have at least this * many bytes left. */ protected void ensureMin(int n) { if (pos + n > srcEnd) throw incompleteUtf8(); } /** * Reads the next byte of a multi-byte UTF-8 character and returns its * contents (lower 6 bits). * *

Throws a GeneratorError if the byte is not a valid tail. */ private int nextPart() { char c = next(); // tail bytes must be 0b10xxxxxx if ((c & 0xc0) != 0x80) throw invalidUtf8(); return c & 0x3f; } protected void quoteStart() { if (quoteStart == -1) quoteStart = charStart; } /** * When in a sequence of characters that can be copied directly, * interrupts the sequence and copies it to the output buffer. * * @param endPos The offset until which the direct character quoting should * occur. You may pass {@link #pos} to quote until the most * recently read character, or {@link #charStart} to quote * until the character before it. */ protected void quoteStop(int endPos) { if (quoteStart != -1) { out.append(src, quoteStart, endPos - quoteStart); quoteStart = -1; } } protected void append(int b) { out.append(b); } protected void append(byte[] origin, int start, int length) { out.append(origin, start, length); } protected abstract RaiseException invalidUtf8(); protected RaiseException incompleteUtf8() { return invalidUtf8(); } } ruby-json-2.1.0+dfsg.orig/java/src/json/ext/Utils.java0000644000175000017500000000632213113111601022111 0ustar boutilboutil/* * This code is copyrighted work by Daniel Luz . 
* * Distributed under the Ruby license: https://www.ruby-lang.org/en/about/license.txt */ package json.ext; import org.jruby.Ruby; import org.jruby.RubyArray; import org.jruby.RubyClass; import org.jruby.RubyException; import org.jruby.RubyHash; import org.jruby.RubyString; import org.jruby.exceptions.RaiseException; import org.jruby.runtime.Block; import org.jruby.runtime.ThreadContext; import org.jruby.runtime.builtin.IRubyObject; import org.jruby.util.ByteList; /** * Library of miscellaneous utility functions */ final class Utils { public static final String M_GENERATOR_ERROR = "GeneratorError"; public static final String M_NESTING_ERROR = "NestingError"; public static final String M_PARSER_ERROR = "ParserError"; private Utils() { throw new RuntimeException(); } /** * Safe {@link RubyArray} type-checking. * Returns the given object if it is an Array, * or throws an exception if not. * @param object The object to test * @return The given object if it is an Array * @throws RaiseException TypeError if the object is not * of the expected type */ static RubyArray ensureArray(IRubyObject object) throws RaiseException { if (object instanceof RubyArray) return (RubyArray)object; Ruby runtime = object.getRuntime(); throw runtime.newTypeError(object, runtime.getArray()); } static RubyHash ensureHash(IRubyObject object) throws RaiseException { if (object instanceof RubyHash) return (RubyHash)object; Ruby runtime = object.getRuntime(); throw runtime.newTypeError(object, runtime.getHash()); } static RubyString ensureString(IRubyObject object) throws RaiseException { if (object instanceof RubyString) return (RubyString)object; Ruby runtime = object.getRuntime(); throw runtime.newTypeError(object, runtime.getString()); } static RaiseException newException(ThreadContext context, String className, String message) { return newException(context, className, context.getRuntime().newString(message)); } static RaiseException newException(ThreadContext context, String className, 
RubyString message) { RuntimeInfo info = RuntimeInfo.forRuntime(context.getRuntime()); RubyClass klazz = info.jsonModule.get().getClass(className); RubyException excptn = (RubyException)klazz.newInstance(context, new IRubyObject[] {message}, Block.NULL_BLOCK); return new RaiseException(excptn); } static byte[] repeat(ByteList a, int n) { return repeat(a.unsafeBytes(), a.begin(), a.length(), n); } static byte[] repeat(byte[] a, int begin, int length, int n) { if (length == 0) return ByteList.NULL_ARRAY; int resultLen = length * n; byte[] result = new byte[resultLen]; for (int pos = 0; pos < resultLen; pos += length) { System.arraycopy(a, begin, result, pos, length); } return result; } } ruby-json-2.1.0+dfsg.orig/ext/0000755000175000017500000000000013113111601015462 5ustar boutilboutilruby-json-2.1.0+dfsg.orig/ext/json/0000755000175000017500000000000013113111601016433 5ustar boutilboutilruby-json-2.1.0+dfsg.orig/ext/json/ext/0000755000175000017500000000000013113111601017233 5ustar boutilboutilruby-json-2.1.0+dfsg.orig/ext/json/ext/fbuffer/0000755000175000017500000000000013113111601020652 5ustar boutilboutilruby-json-2.1.0+dfsg.orig/ext/json/ext/fbuffer/fbuffer.h0000644000175000017500000001027113113111601022443 0ustar boutilboutil #ifndef _FBUFFER_H_ #define _FBUFFER_H_ #include "ruby.h" #ifndef RHASH_SIZE #define RHASH_SIZE(hsh) (RHASH(hsh)->tbl->num_entries) #endif #ifndef RFLOAT_VALUE #define RFLOAT_VALUE(val) (RFLOAT(val)->value) #endif #ifndef RARRAY_LEN #define RARRAY_LEN(ARRAY) RARRAY(ARRAY)->len #endif #ifndef RSTRING_PTR #define RSTRING_PTR(string) RSTRING(string)->ptr #endif #ifndef RSTRING_LEN #define RSTRING_LEN(string) RSTRING(string)->len #endif #ifdef PRIsVALUE # define RB_OBJ_CLASSNAME(obj) rb_obj_class(obj) # define RB_OBJ_STRING(obj) (obj) #else # define PRIsVALUE "s" # define RB_OBJ_CLASSNAME(obj) rb_obj_classname(obj) # define RB_OBJ_STRING(obj) StringValueCStr(obj) #endif #ifdef HAVE_RUBY_ENCODING_H #include "ruby/encoding.h" #define 
FORCE_UTF8(obj) rb_enc_associate((obj), rb_utf8_encoding()) #else #define FORCE_UTF8(obj) #endif /* We don't need to guard objects for rbx, so let's do nothing at all. */ #ifndef RB_GC_GUARD #define RB_GC_GUARD(object) #endif typedef struct FBufferStruct { unsigned long initial_length; char *ptr; unsigned long len; unsigned long capa; } FBuffer; #define FBUFFER_INITIAL_LENGTH_DEFAULT 1024 #define FBUFFER_PTR(fb) (fb->ptr) #define FBUFFER_LEN(fb) (fb->len) #define FBUFFER_CAPA(fb) (fb->capa) #define FBUFFER_PAIR(fb) FBUFFER_PTR(fb), FBUFFER_LEN(fb) static FBuffer *fbuffer_alloc(unsigned long initial_length); static void fbuffer_free(FBuffer *fb); static void fbuffer_clear(FBuffer *fb); static void fbuffer_append(FBuffer *fb, const char *newstr, unsigned long len); #ifdef JSON_GENERATOR static void fbuffer_append_long(FBuffer *fb, long number); #endif static void fbuffer_append_char(FBuffer *fb, char newchr); #ifdef JSON_GENERATOR static FBuffer *fbuffer_dup(FBuffer *fb); static VALUE fbuffer_to_s(FBuffer *fb); #endif static FBuffer *fbuffer_alloc(unsigned long initial_length) { FBuffer *fb; if (initial_length <= 0) initial_length = FBUFFER_INITIAL_LENGTH_DEFAULT; fb = ALLOC(FBuffer); memset((void *) fb, 0, sizeof(FBuffer)); fb->initial_length = initial_length; return fb; } static void fbuffer_free(FBuffer *fb) { if (fb->ptr) ruby_xfree(fb->ptr); ruby_xfree(fb); } static void fbuffer_clear(FBuffer *fb) { fb->len = 0; } static void fbuffer_inc_capa(FBuffer *fb, unsigned long requested) { unsigned long required; if (!fb->ptr) { fb->ptr = ALLOC_N(char, fb->initial_length); fb->capa = fb->initial_length; } for (required = fb->capa; requested > required - fb->len; required <<= 1); if (required > fb->capa) { REALLOC_N(fb->ptr, char, required); fb->capa = required; } } static void fbuffer_append(FBuffer *fb, const char *newstr, unsigned long len) { if (len > 0) { fbuffer_inc_capa(fb, len); MEMCPY(fb->ptr + fb->len, newstr, char, len); fb->len += len; } } #ifdef 
JSON_GENERATOR static void fbuffer_append_str(FBuffer *fb, VALUE str) { const char *newstr = StringValuePtr(str); unsigned long len = RSTRING_LEN(str); RB_GC_GUARD(str); fbuffer_append(fb, newstr, len); } #endif static void fbuffer_append_char(FBuffer *fb, char newchr) { fbuffer_inc_capa(fb, 1); *(fb->ptr + fb->len) = newchr; fb->len++; } #ifdef JSON_GENERATOR static void freverse(char *start, char *end) { char c; while (end > start) { c = *end, *end-- = *start, *start++ = c; } } static long fltoa(long number, char *buf) { static char digits[] = "0123456789"; long sign = number; char* tmp = buf; if (sign < 0) number = -number; do *tmp++ = digits[number % 10]; while (number /= 10); if (sign < 0) *tmp++ = '-'; freverse(buf, tmp - 1); return tmp - buf; } static void fbuffer_append_long(FBuffer *fb, long number) { char buf[20]; unsigned long len = fltoa(number, buf); fbuffer_append(fb, buf, len); } static FBuffer *fbuffer_dup(FBuffer *fb) { unsigned long len = fb->len; FBuffer *result; result = fbuffer_alloc(len); fbuffer_append(result, FBUFFER_PAIR(fb)); return result; } static VALUE fbuffer_to_s(FBuffer *fb) { VALUE result = rb_str_new(FBUFFER_PTR(fb), FBUFFER_LEN(fb)); fbuffer_free(fb); FORCE_UTF8(result); return result; } #endif #endif ruby-json-2.1.0+dfsg.orig/ext/json/ext/parser/0000755000175000017500000000000013113111601020527 5ustar boutilboutilruby-json-2.1.0+dfsg.orig/ext/json/ext/parser/depend0000644000175000017500000000007313113111601021711 0ustar boutilboutilparser.o: parser.c parser.h $(srcdir)/../fbuffer/fbuffer.h ruby-json-2.1.0+dfsg.orig/ext/json/ext/parser/parser.rl0000644000175000017500000006262713113111601022377 0ustar boutilboutil#include "../fbuffer/fbuffer.h" #include "parser.h" #if defined HAVE_RUBY_ENCODING_H # define EXC_ENCODING rb_utf8_encoding(), # ifndef HAVE_RB_ENC_RAISE static void enc_raise(rb_encoding *enc, VALUE exc, const char *fmt, ...) 
{ va_list args; VALUE mesg; va_start(args, fmt); mesg = rb_enc_vsprintf(enc, fmt, args); va_end(args); rb_exc_raise(rb_exc_new3(exc, mesg)); } # define rb_enc_raise enc_raise # endif #else # define EXC_ENCODING /* nothing */ # define rb_enc_raise rb_raise #endif /* unicode */ static const char digit_values[256] = { -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, -1, -1, -1, -1, -1, -1, -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1 }; static UTF32 unescape_unicode(const unsigned char *p) { char b; UTF32 result = 0; b = digit_values[p[0]]; if (b < 0) return UNI_REPLACEMENT_CHAR; result = (result << 4) | (unsigned char)b; b = digit_values[p[1]]; if (b < 0) return UNI_REPLACEMENT_CHAR; result = (result << 4) | (unsigned char)b; b = digit_values[p[2]]; if (b < 0) return UNI_REPLACEMENT_CHAR; result = (result << 4) | (unsigned char)b; b = digit_values[p[3]]; if (b < 0) return UNI_REPLACEMENT_CHAR; result = (result << 4) | (unsigned char)b; return result; } static int convert_UTF32_to_UTF8(char *buf, UTF32 ch) { int len = 1; if (ch <= 0x7F) { buf[0] = (char) ch; } else if (ch <= 0x07FF) { 
buf[0] = (char) ((ch >> 6) | 0xC0); buf[1] = (char) ((ch & 0x3F) | 0x80); len++; } else if (ch <= 0xFFFF) { buf[0] = (char) ((ch >> 12) | 0xE0); buf[1] = (char) (((ch >> 6) & 0x3F) | 0x80); buf[2] = (char) ((ch & 0x3F) | 0x80); len += 2; } else if (ch <= 0x1fffff) { buf[0] =(char) ((ch >> 18) | 0xF0); buf[1] =(char) (((ch >> 12) & 0x3F) | 0x80); buf[2] =(char) (((ch >> 6) & 0x3F) | 0x80); buf[3] =(char) ((ch & 0x3F) | 0x80); len += 3; } else { buf[0] = '?'; } return len; } static VALUE mJSON, mExt, cParser, eParserError, eNestingError; static VALUE CNaN, CInfinity, CMinusInfinity; static ID i_json_creatable_p, i_json_create, i_create_id, i_create_additions, i_chr, i_max_nesting, i_allow_nan, i_symbolize_names, i_object_class, i_array_class, i_decimal_class, i_key_p, i_deep_const_get, i_match, i_match_string, i_aset, i_aref, i_leftshift, i_new; %%{ machine JSON_common; cr = '\n'; cr_neg = [^\n]; ws = [ \t\r\n]; c_comment = '/*' ( any* - (any* '*/' any* ) ) '*/'; cpp_comment = '//' cr_neg* cr; comment = c_comment | cpp_comment; ignore = ws | comment; name_separator = ':'; value_separator = ','; Vnull = 'null'; Vfalse = 'false'; Vtrue = 'true'; VNaN = 'NaN'; VInfinity = 'Infinity'; VMinusInfinity = '-Infinity'; begin_value = [nft\"\-\[\{NI] | digit; begin_object = '{'; end_object = '}'; begin_array = '['; end_array = ']'; begin_string = '"'; begin_name = begin_string; begin_number = digit | '-'; }%% %%{ machine JSON_object; include JSON_common; write data; action parse_value { VALUE v = Qnil; char *np = JSON_parse_value(json, fpc, pe, &v, current_nesting); if (np == NULL) { fhold; fbreak; } else { if (NIL_P(json->object_class)) { rb_hash_aset(*result, last_name, v); } else { rb_funcall(*result, i_aset, 2, last_name, v); } fexec np; } } action parse_name { char *np; json->parsing_name = 1; np = JSON_parse_string(json, fpc, pe, &last_name); json->parsing_name = 0; if (np == NULL) { fhold; fbreak; } else fexec np; } action exit { fhold; fbreak; } pair = ignore* 
begin_name >parse_name ignore* name_separator ignore* begin_value >parse_value; next_pair = ignore* value_separator pair; main := ( begin_object (pair (next_pair)*)? ignore* end_object ) @exit; }%% static char *JSON_parse_object(JSON_Parser *json, char *p, char *pe, VALUE *result, int current_nesting) { int cs = EVIL; VALUE last_name = Qnil; VALUE object_class = json->object_class; if (json->max_nesting && current_nesting > json->max_nesting) { rb_raise(eNestingError, "nesting of %d is too deep", current_nesting); } *result = NIL_P(object_class) ? rb_hash_new() : rb_class_new_instance(0, 0, object_class); %% write init; %% write exec; if (cs >= JSON_object_first_final) { if (json->create_additions) { VALUE klassname; if (NIL_P(json->object_class)) { klassname = rb_hash_aref(*result, json->create_id); } else { klassname = rb_funcall(*result, i_aref, 1, json->create_id); } if (!NIL_P(klassname)) { VALUE klass = rb_funcall(mJSON, i_deep_const_get, 1, klassname); if (RTEST(rb_funcall(klass, i_json_creatable_p, 0))) { *result = rb_funcall(klass, i_json_create, 1, *result); } } } return p + 1; } else { return NULL; } } %%{ machine JSON_value; include JSON_common; write data; action parse_null { *result = Qnil; } action parse_false { *result = Qfalse; } action parse_true { *result = Qtrue; } action parse_nan { if (json->allow_nan) { *result = CNaN; } else { rb_enc_raise(EXC_ENCODING eParserError, "%u: unexpected token at '%s'", __LINE__, p - 2); } } action parse_infinity { if (json->allow_nan) { *result = CInfinity; } else { rb_enc_raise(EXC_ENCODING eParserError, "%u: unexpected token at '%s'", __LINE__, p - 8); } } action parse_string { char *np = JSON_parse_string(json, fpc, pe, result); if (np == NULL) { fhold; fbreak; } else fexec np; } action parse_number { char *np; if(pe > fpc + 8 && !strncmp(MinusInfinity, fpc, 9)) { if (json->allow_nan) { *result = CMinusInfinity; fexec p + 10; fhold; fbreak; } else { rb_enc_raise(EXC_ENCODING eParserError, "%u: unexpected token 
at '%s'", __LINE__, p); } } np = JSON_parse_float(json, fpc, pe, result); if (np != NULL) fexec np; np = JSON_parse_integer(json, fpc, pe, result); if (np != NULL) fexec np; fhold; fbreak; } action parse_array { char *np; np = JSON_parse_array(json, fpc, pe, result, current_nesting + 1); if (np == NULL) { fhold; fbreak; } else fexec np; } action parse_object { char *np; np = JSON_parse_object(json, fpc, pe, result, current_nesting + 1); if (np == NULL) { fhold; fbreak; } else fexec np; } action exit { fhold; fbreak; } main := ignore* ( Vnull @parse_null | Vfalse @parse_false | Vtrue @parse_true | VNaN @parse_nan | VInfinity @parse_infinity | begin_number >parse_number | begin_string >parse_string | begin_array >parse_array | begin_object >parse_object ) ignore* %*exit; }%% static char *JSON_parse_value(JSON_Parser *json, char *p, char *pe, VALUE *result, int current_nesting) { int cs = EVIL; %% write init; %% write exec; if (cs >= JSON_value_first_final) { return p; } else { return NULL; } } %%{ machine JSON_integer; write data; action exit { fhold; fbreak; } main := '-'? ('0' | [1-9][0-9]*) (^[0-9]? @exit); }%% static char *JSON_parse_integer(JSON_Parser *json, char *p, char *pe, VALUE *result) { int cs = EVIL; %% write init; json->memo = p; %% write exec; if (cs >= JSON_integer_first_final) { long len = p - json->memo; fbuffer_clear(json->fbuffer); fbuffer_append(json->fbuffer, json->memo, len); fbuffer_append_char(json->fbuffer, '\0'); *result = rb_cstr2inum(FBUFFER_PTR(json->fbuffer), 10); return p + 1; } else { return NULL; } } %%{ machine JSON_float; include JSON_common; write data; action exit { fhold; fbreak; } main := '-'? ( (('0' | [1-9][0-9]*) '.' [0-9]+ ([Ee] [+\-]?[0-9]+)?) | (('0' | [1-9][0-9]*) ([Ee] [+\-]?[0-9]+)) ) (^[0-9Ee.\-]? 
@exit ); }%% static char *JSON_parse_float(JSON_Parser *json, char *p, char *pe, VALUE *result) { int cs = EVIL; %% write init; json->memo = p; %% write exec; if (cs >= JSON_float_first_final) { long len = p - json->memo; fbuffer_clear(json->fbuffer); fbuffer_append(json->fbuffer, json->memo, len); fbuffer_append_char(json->fbuffer, '\0'); if (NIL_P(json->decimal_class)) { *result = rb_float_new(rb_cstr_to_dbl(FBUFFER_PTR(json->fbuffer), 1)); } else { VALUE text; text = rb_str_new2(FBUFFER_PTR(json->fbuffer)); *result = rb_funcall(json->decimal_class, i_new, 1, text); } return p + 1; } else { return NULL; } } %%{ machine JSON_array; include JSON_common; write data; action parse_value { VALUE v = Qnil; char *np = JSON_parse_value(json, fpc, pe, &v, current_nesting); if (np == NULL) { fhold; fbreak; } else { if (NIL_P(json->array_class)) { rb_ary_push(*result, v); } else { rb_funcall(*result, i_leftshift, 1, v); } fexec np; } } action exit { fhold; fbreak; } next_element = value_separator ignore* begin_value >parse_value; main := begin_array ignore* ((begin_value >parse_value ignore*) (ignore* next_element ignore*)*)? end_array @exit; }%% static char *JSON_parse_array(JSON_Parser *json, char *p, char *pe, VALUE *result, int current_nesting) { int cs = EVIL; VALUE array_class = json->array_class; if (json->max_nesting && current_nesting > json->max_nesting) { rb_raise(eNestingError, "nesting of %d is too deep", current_nesting); } *result = NIL_P(array_class) ? 
rb_ary_new() : rb_class_new_instance(0, 0, array_class); %% write init; %% write exec; if(cs >= JSON_array_first_final) { return p + 1; } else { rb_enc_raise(EXC_ENCODING eParserError, "%u: unexpected token at '%s'", __LINE__, p); return NULL; } } static VALUE json_string_unescape(VALUE result, char *string, char *stringEnd) { char *p = string, *pe = string, *unescape; int unescape_len; char buf[4]; while (pe < stringEnd) { if (*pe == '\\') { unescape = (char *) "?"; unescape_len = 1; if (pe > p) rb_str_buf_cat(result, p, pe - p); switch (*++pe) { case 'n': unescape = (char *) "\n"; break; case 'r': unescape = (char *) "\r"; break; case 't': unescape = (char *) "\t"; break; case '"': unescape = (char *) "\""; break; case '\\': unescape = (char *) "\\"; break; case 'b': unescape = (char *) "\b"; break; case 'f': unescape = (char *) "\f"; break; case 'u': if (pe > stringEnd - 4) { rb_enc_raise( EXC_ENCODING eParserError, "%u: incomplete unicode character escape sequence at '%s'", __LINE__, p ); } else { UTF32 ch = unescape_unicode((unsigned char *) ++pe); pe += 3; if (UNI_SUR_HIGH_START == (ch & 0xFC00)) { pe++; if (pe > stringEnd - 6) { rb_enc_raise( EXC_ENCODING eParserError, "%u: incomplete surrogate pair at '%s'", __LINE__, p ); } if (pe[0] == '\\' && pe[1] == 'u') { UTF32 sur = unescape_unicode((unsigned char *) pe + 2); ch = (((ch & 0x3F) << 10) | ((((ch >> 6) & 0xF) + 1) << 16) | (sur & 0x3FF)); pe += 5; } else { unescape = (char *) "?"; break; } } unescape_len = convert_UTF32_to_UTF8(buf, ch); unescape = buf; } break; default: p = pe; continue; } rb_str_buf_cat(result, unescape, unescape_len); p = ++pe; } else { pe++; } } rb_str_buf_cat(result, p, pe - p); return result; } %%{ machine JSON_string; include JSON_common; write data; action parse_string { *result = json_string_unescape(*result, json->memo + 1, p); if (NIL_P(*result)) { fhold; fbreak; } else { FORCE_UTF8(*result); fexec p + 1; } } action exit { fhold; fbreak; } main := '"' ((^([\"\\] | 0..0x1f) | 
'\\'[\"\\/bfnrt] | '\\u'[0-9a-fA-F]{4} | '\\'^([\"\\/bfnrtu]|0..0x1f))* %parse_string) '"' @exit; }%% static int match_i(VALUE regexp, VALUE klass, VALUE memo) { if (regexp == Qundef) return ST_STOP; if (RTEST(rb_funcall(klass, i_json_creatable_p, 0)) && RTEST(rb_funcall(regexp, i_match, 1, rb_ary_entry(memo, 0)))) { rb_ary_push(memo, klass); return ST_STOP; } return ST_CONTINUE; } static char *JSON_parse_string(JSON_Parser *json, char *p, char *pe, VALUE *result) { int cs = EVIL; VALUE match_string; *result = rb_str_buf_new(0); %% write init; json->memo = p; %% write exec; if (json->create_additions && RTEST(match_string = json->match_string)) { VALUE klass; VALUE memo = rb_ary_new2(2); rb_ary_push(memo, *result); rb_hash_foreach(match_string, match_i, memo); klass = rb_ary_entry(memo, 1); if (RTEST(klass)) { *result = rb_funcall(klass, i_json_create, 1, *result); } } if (json->symbolize_names && json->parsing_name) { *result = rb_str_intern(*result); } else { rb_str_resize(*result, RSTRING_LEN(*result)); } if (cs >= JSON_string_first_final) { return p + 1; } else { return NULL; } } /* * Document-class: JSON::Ext::Parser * * This is the JSON parser implemented as a C extension. It can be configured * to be used by setting * * JSON.parser = JSON::Ext::Parser * * with the method parser= in JSON. * */ static VALUE convert_encoding(VALUE source) { #ifdef HAVE_RUBY_ENCODING_H rb_encoding *enc = rb_enc_get(source); if (enc == rb_ascii8bit_encoding()) { if (OBJ_FROZEN(source)) { source = rb_str_dup(source); } FORCE_UTF8(source); } else { source = rb_str_conv_enc(source, rb_enc_get(source), rb_utf8_encoding()); } #endif return source; } /* * call-seq: new(source, opts => {}) * * Creates a new JSON::Ext::Parser instance for the string _source_. * * Creates a new JSON::Ext::Parser instance for the string _source_. * * It will be configured by the _opts_ hash. 
_opts_ can have the following * keys: * * _opts_ can have the following keys: * * *max_nesting*: The maximum depth of nesting allowed in the parsed data * structures. Disable depth checking with :max_nesting => false|nil|0, it * defaults to 100. * * *allow_nan*: If set to true, allow NaN, Infinity and -Infinity in * defiance of RFC 4627 to be parsed by the Parser. This option defaults to * false. * * *symbolize_names*: If set to true, returns symbols for the names * (keys) in a JSON object. Otherwise strings are returned, which is * also the default. It's not possible to use this option in * conjunction with the *create_additions* option. * * *create_additions*: If set to false, the Parser doesn't create * additions even if a matching class and create_id was found. This option * defaults to false. * * *object_class*: Defaults to Hash * * *array_class*: Defaults to Array */ static VALUE cParser_initialize(int argc, VALUE *argv, VALUE self) { VALUE source, opts; GET_PARSER_INIT; if (json->Vsource) { rb_raise(rb_eTypeError, "already initialized instance"); } #ifdef HAVE_RB_SCAN_ARGS_OPTIONAL_HASH rb_scan_args(argc, argv, "1:", &source, &opts); #else rb_scan_args(argc, argv, "11", &source, &opts); #endif if (!NIL_P(opts)) { #ifndef HAVE_RB_SCAN_ARGS_OPTIONAL_HASH opts = rb_convert_type(opts, T_HASH, "Hash", "to_hash"); if (NIL_P(opts)) { rb_raise(rb_eArgError, "opts needs to be like a hash"); } else { #endif VALUE tmp = ID2SYM(i_max_nesting); if (option_given_p(opts, tmp)) { VALUE max_nesting = rb_hash_aref(opts, tmp); if (RTEST(max_nesting)) { Check_Type(max_nesting, T_FIXNUM); json->max_nesting = FIX2INT(max_nesting); } else { json->max_nesting = 0; } } else { json->max_nesting = 100; } tmp = ID2SYM(i_allow_nan); if (option_given_p(opts, tmp)) { json->allow_nan = RTEST(rb_hash_aref(opts, tmp)) ? 1 : 0; } else { json->allow_nan = 0; } tmp = ID2SYM(i_symbolize_names); if (option_given_p(opts, tmp)) { json->symbolize_names = RTEST(rb_hash_aref(opts, tmp)) ? 
1 : 0; } else { json->symbolize_names = 0; } tmp = ID2SYM(i_create_additions); if (option_given_p(opts, tmp)) { json->create_additions = RTEST(rb_hash_aref(opts, tmp)); } else { json->create_additions = 0; } if (json->symbolize_names && json->create_additions) { rb_raise(rb_eArgError, "options :symbolize_names and :create_additions cannot be " " used in conjunction"); } tmp = ID2SYM(i_create_id); if (option_given_p(opts, tmp)) { json->create_id = rb_hash_aref(opts, tmp); } else { json->create_id = rb_funcall(mJSON, i_create_id, 0); } tmp = ID2SYM(i_object_class); if (option_given_p(opts, tmp)) { json->object_class = rb_hash_aref(opts, tmp); } else { json->object_class = Qnil; } tmp = ID2SYM(i_array_class); if (option_given_p(opts, tmp)) { json->array_class = rb_hash_aref(opts, tmp); } else { json->array_class = Qnil; } tmp = ID2SYM(i_decimal_class); if (option_given_p(opts, tmp)) { json->decimal_class = rb_hash_aref(opts, tmp); } else { json->decimal_class = Qnil; } tmp = ID2SYM(i_match_string); if (option_given_p(opts, tmp)) { VALUE match_string = rb_hash_aref(opts, tmp); json->match_string = RTEST(match_string) ? match_string : Qnil; } else { json->match_string = Qnil; } #ifndef HAVE_RB_SCAN_ARGS_OPTIONAL_HASH } #endif } else { json->max_nesting = 100; json->allow_nan = 0; json->create_additions = 1; json->create_id = rb_funcall(mJSON, i_create_id, 0); json->object_class = Qnil; json->array_class = Qnil; json->decimal_class = Qnil; } source = convert_encoding(StringValue(source)); StringValue(source); json->len = RSTRING_LEN(source); json->source = RSTRING_PTR(source);; json->Vsource = source; return self; } %%{ machine JSON; write data; include JSON_common; action parse_value { char *np = JSON_parse_value(json, fpc, pe, &result, 0); if (np == NULL) { fhold; fbreak; } else fexec np; } main := ignore* ( begin_value >parse_value ) ignore*; }%% /* * call-seq: parse() * * Parses the current JSON text _source_ and returns the complete data * structure as a result. 
*/ static VALUE cParser_parse(VALUE self) { char *p, *pe; int cs = EVIL; VALUE result = Qnil; GET_PARSER; %% write init; p = json->source; pe = p + json->len; %% write exec; if (cs >= JSON_first_final && p == pe) { return result; } else { rb_enc_raise(EXC_ENCODING eParserError, "%u: unexpected token at '%s'", __LINE__, p); return Qnil; } } static void JSON_mark(void *ptr) { JSON_Parser *json = ptr; rb_gc_mark_maybe(json->Vsource); rb_gc_mark_maybe(json->create_id); rb_gc_mark_maybe(json->object_class); rb_gc_mark_maybe(json->array_class); rb_gc_mark_maybe(json->decimal_class); rb_gc_mark_maybe(json->match_string); } static void JSON_free(void *ptr) { JSON_Parser *json = ptr; fbuffer_free(json->fbuffer); ruby_xfree(json); } static size_t JSON_memsize(const void *ptr) { const JSON_Parser *json = ptr; return sizeof(*json) + FBUFFER_CAPA(json->fbuffer); } #ifdef NEW_TYPEDDATA_WRAPPER static const rb_data_type_t JSON_Parser_type = { "JSON/Parser", {JSON_mark, JSON_free, JSON_memsize,}, #ifdef RUBY_TYPED_FREE_IMMEDIATELY 0, 0, RUBY_TYPED_FREE_IMMEDIATELY, #endif }; #endif static VALUE cJSON_parser_s_allocate(VALUE klass) { JSON_Parser *json; VALUE obj = TypedData_Make_Struct(klass, JSON_Parser, &JSON_Parser_type, json); json->fbuffer = fbuffer_alloc(0); return obj; } /* * call-seq: source() * * Returns a copy of the current _source_ string, that was used to construct * this Parser. 
*/ static VALUE cParser_source(VALUE self) { GET_PARSER; return rb_str_dup(json->Vsource); } void Init_parser(void) { rb_require("json/common"); mJSON = rb_define_module("JSON"); mExt = rb_define_module_under(mJSON, "Ext"); cParser = rb_define_class_under(mExt, "Parser", rb_cObject); eParserError = rb_path2class("JSON::ParserError"); eNestingError = rb_path2class("JSON::NestingError"); rb_define_alloc_func(cParser, cJSON_parser_s_allocate); rb_define_method(cParser, "initialize", cParser_initialize, -1); rb_define_method(cParser, "parse", cParser_parse, 0); rb_define_method(cParser, "source", cParser_source, 0); CNaN = rb_const_get(mJSON, rb_intern("NaN")); CInfinity = rb_const_get(mJSON, rb_intern("Infinity")); CMinusInfinity = rb_const_get(mJSON, rb_intern("MinusInfinity")); i_json_creatable_p = rb_intern("json_creatable?"); i_json_create = rb_intern("json_create"); i_create_id = rb_intern("create_id"); i_create_additions = rb_intern("create_additions"); i_chr = rb_intern("chr"); i_max_nesting = rb_intern("max_nesting"); i_allow_nan = rb_intern("allow_nan"); i_symbolize_names = rb_intern("symbolize_names"); i_object_class = rb_intern("object_class"); i_array_class = rb_intern("array_class"); i_decimal_class = rb_intern("decimal_class"); i_match = rb_intern("match"); i_match_string = rb_intern("match_string"); i_key_p = rb_intern("key?"); i_deep_const_get = rb_intern("deep_const_get"); i_aset = rb_intern("[]="); i_aref = rb_intern("[]"); i_leftshift = rb_intern("<<"); i_new = rb_intern("new"); } /* * Local variables: * mode: c * c-file-style: ruby * indent-tabs-mode: nil * End: */ ruby-json-2.1.0+dfsg.orig/ext/json/ext/parser/extconf.rb0000644000175000017500000000016613113111601022525 0ustar boutilboutil# frozen_string_literal: false require 'mkmf' have_func("rb_enc_raise", "ruby.h") create_makefile 'json/ext/parser' ruby-json-2.1.0+dfsg.orig/ext/json/ext/parser/parser.c0000644000175000017500000013005313113111601022171 0ustar boutilboutil #line 1 "parser.rl" 
#include "../fbuffer/fbuffer.h" #include "parser.h" #if defined HAVE_RUBY_ENCODING_H # define EXC_ENCODING rb_utf8_encoding(), # ifndef HAVE_RB_ENC_RAISE static void enc_raise(rb_encoding *enc, VALUE exc, const char *fmt, ...) { va_list args; VALUE mesg; va_start(args, fmt); mesg = rb_enc_vsprintf(enc, fmt, args); va_end(args); rb_exc_raise(rb_exc_new3(exc, mesg)); } # define rb_enc_raise enc_raise # endif #else # define EXC_ENCODING /* nothing */ # define rb_enc_raise rb_raise #endif /* unicode */ static const char digit_values[256] = { -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, -1, -1, -1, -1, -1, -1, -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1 }; static UTF32 unescape_unicode(const unsigned char *p) { char b; UTF32 result = 0; b = digit_values[p[0]]; if (b < 0) return UNI_REPLACEMENT_CHAR; result = (result << 4) | (unsigned char)b; b = digit_values[p[1]]; if (b < 0) return UNI_REPLACEMENT_CHAR; result = (result << 4) | (unsigned char)b; b = digit_values[p[2]]; if (b < 0) return UNI_REPLACEMENT_CHAR; result = (result << 4) | (unsigned char)b; b = digit_values[p[3]]; if (b < 0) 
return UNI_REPLACEMENT_CHAR; result = (result << 4) | (unsigned char)b; return result; } static int convert_UTF32_to_UTF8(char *buf, UTF32 ch) { int len = 1; if (ch <= 0x7F) { buf[0] = (char) ch; } else if (ch <= 0x07FF) { buf[0] = (char) ((ch >> 6) | 0xC0); buf[1] = (char) ((ch & 0x3F) | 0x80); len++; } else if (ch <= 0xFFFF) { buf[0] = (char) ((ch >> 12) | 0xE0); buf[1] = (char) (((ch >> 6) & 0x3F) | 0x80); buf[2] = (char) ((ch & 0x3F) | 0x80); len += 2; } else if (ch <= 0x1fffff) { buf[0] =(char) ((ch >> 18) | 0xF0); buf[1] =(char) (((ch >> 12) & 0x3F) | 0x80); buf[2] =(char) (((ch >> 6) & 0x3F) | 0x80); buf[3] =(char) ((ch & 0x3F) | 0x80); len += 3; } else { buf[0] = '?'; } return len; } static VALUE mJSON, mExt, cParser, eParserError, eNestingError; static VALUE CNaN, CInfinity, CMinusInfinity; static ID i_json_creatable_p, i_json_create, i_create_id, i_create_additions, i_chr, i_max_nesting, i_allow_nan, i_symbolize_names, i_object_class, i_array_class, i_decimal_class, i_key_p, i_deep_const_get, i_match, i_match_string, i_aset, i_aref, i_leftshift, i_new; #line 125 "parser.rl" #line 107 "parser.c" enum {JSON_object_start = 1}; enum {JSON_object_first_final = 27}; enum {JSON_object_error = 0}; enum {JSON_object_en_main = 1}; #line 166 "parser.rl" static char *JSON_parse_object(JSON_Parser *json, char *p, char *pe, VALUE *result, int current_nesting) { int cs = EVIL; VALUE last_name = Qnil; VALUE object_class = json->object_class; if (json->max_nesting && current_nesting > json->max_nesting) { rb_raise(eNestingError, "nesting of %d is too deep", current_nesting); } *result = NIL_P(object_class) ? 
rb_hash_new() : rb_class_new_instance(0, 0, object_class); #line 131 "parser.c" { cs = JSON_object_start; } #line 181 "parser.rl" #line 138 "parser.c" { if ( p == pe ) goto _test_eof; switch ( cs ) { case 1: if ( (*p) == 123 ) goto st2; goto st0; st0: cs = 0; goto _out; st2: if ( ++p == pe ) goto _test_eof2; case 2: switch( (*p) ) { case 13: goto st2; case 32: goto st2; case 34: goto tr2; case 47: goto st23; case 125: goto tr4; } if ( 9 <= (*p) && (*p) <= 10 ) goto st2; goto st0; tr2: #line 148 "parser.rl" { char *np; json->parsing_name = 1; np = JSON_parse_string(json, p, pe, &last_name); json->parsing_name = 0; if (np == NULL) { p--; {p++; cs = 3; goto _out;} } else {p = (( np))-1;} } goto st3; st3: if ( ++p == pe ) goto _test_eof3; case 3: #line 179 "parser.c" switch( (*p) ) { case 13: goto st3; case 32: goto st3; case 47: goto st4; case 58: goto st8; } if ( 9 <= (*p) && (*p) <= 10 ) goto st3; goto st0; st4: if ( ++p == pe ) goto _test_eof4; case 4: switch( (*p) ) { case 42: goto st5; case 47: goto st7; } goto st0; st5: if ( ++p == pe ) goto _test_eof5; case 5: if ( (*p) == 42 ) goto st6; goto st5; st6: if ( ++p == pe ) goto _test_eof6; case 6: switch( (*p) ) { case 42: goto st6; case 47: goto st3; } goto st5; st7: if ( ++p == pe ) goto _test_eof7; case 7: if ( (*p) == 10 ) goto st3; goto st7; st8: if ( ++p == pe ) goto _test_eof8; case 8: switch( (*p) ) { case 13: goto st8; case 32: goto st8; case 34: goto tr11; case 45: goto tr11; case 47: goto st19; case 73: goto tr11; case 78: goto tr11; case 91: goto tr11; case 102: goto tr11; case 110: goto tr11; case 116: goto tr11; case 123: goto tr11; } if ( (*p) > 10 ) { if ( 48 <= (*p) && (*p) <= 57 ) goto tr11; } else if ( (*p) >= 9 ) goto st8; goto st0; tr11: #line 133 "parser.rl" { VALUE v = Qnil; char *np = JSON_parse_value(json, p, pe, &v, current_nesting); if (np == NULL) { p--; {p++; cs = 9; goto _out;} } else { if (NIL_P(json->object_class)) { rb_hash_aset(*result, last_name, v); } else { rb_funcall(*result, 
i_aset, 2, last_name, v); } {p = (( np))-1;} } } goto st9; st9: if ( ++p == pe ) goto _test_eof9; case 9: #line 266 "parser.c" switch( (*p) ) { case 13: goto st9; case 32: goto st9; case 44: goto st10; case 47: goto st15; case 125: goto tr4; } if ( 9 <= (*p) && (*p) <= 10 ) goto st9; goto st0; st10: if ( ++p == pe ) goto _test_eof10; case 10: switch( (*p) ) { case 13: goto st10; case 32: goto st10; case 34: goto tr2; case 47: goto st11; } if ( 9 <= (*p) && (*p) <= 10 ) goto st10; goto st0; st11: if ( ++p == pe ) goto _test_eof11; case 11: switch( (*p) ) { case 42: goto st12; case 47: goto st14; } goto st0; st12: if ( ++p == pe ) goto _test_eof12; case 12: if ( (*p) == 42 ) goto st13; goto st12; st13: if ( ++p == pe ) goto _test_eof13; case 13: switch( (*p) ) { case 42: goto st13; case 47: goto st10; } goto st12; st14: if ( ++p == pe ) goto _test_eof14; case 14: if ( (*p) == 10 ) goto st10; goto st14; st15: if ( ++p == pe ) goto _test_eof15; case 15: switch( (*p) ) { case 42: goto st16; case 47: goto st18; } goto st0; st16: if ( ++p == pe ) goto _test_eof16; case 16: if ( (*p) == 42 ) goto st17; goto st16; st17: if ( ++p == pe ) goto _test_eof17; case 17: switch( (*p) ) { case 42: goto st17; case 47: goto st9; } goto st16; st18: if ( ++p == pe ) goto _test_eof18; case 18: if ( (*p) == 10 ) goto st9; goto st18; tr4: #line 156 "parser.rl" { p--; {p++; cs = 27; goto _out;} } goto st27; st27: if ( ++p == pe ) goto _test_eof27; case 27: #line 362 "parser.c" goto st0; st19: if ( ++p == pe ) goto _test_eof19; case 19: switch( (*p) ) { case 42: goto st20; case 47: goto st22; } goto st0; st20: if ( ++p == pe ) goto _test_eof20; case 20: if ( (*p) == 42 ) goto st21; goto st20; st21: if ( ++p == pe ) goto _test_eof21; case 21: switch( (*p) ) { case 42: goto st21; case 47: goto st8; } goto st20; st22: if ( ++p == pe ) goto _test_eof22; case 22: if ( (*p) == 10 ) goto st8; goto st22; st23: if ( ++p == pe ) goto _test_eof23; case 23: switch( (*p) ) { case 42: goto st24; case 47: 
goto st26; } goto st0; st24: if ( ++p == pe ) goto _test_eof24; case 24: if ( (*p) == 42 ) goto st25; goto st24; st25: if ( ++p == pe ) goto _test_eof25; case 25: switch( (*p) ) { case 42: goto st25; case 47: goto st2; } goto st24; st26: if ( ++p == pe ) goto _test_eof26; case 26: if ( (*p) == 10 ) goto st2; goto st26; } _test_eof2: cs = 2; goto _test_eof; _test_eof3: cs = 3; goto _test_eof; _test_eof4: cs = 4; goto _test_eof; _test_eof5: cs = 5; goto _test_eof; _test_eof6: cs = 6; goto _test_eof; _test_eof7: cs = 7; goto _test_eof; _test_eof8: cs = 8; goto _test_eof; _test_eof9: cs = 9; goto _test_eof; _test_eof10: cs = 10; goto _test_eof; _test_eof11: cs = 11; goto _test_eof; _test_eof12: cs = 12; goto _test_eof; _test_eof13: cs = 13; goto _test_eof; _test_eof14: cs = 14; goto _test_eof; _test_eof15: cs = 15; goto _test_eof; _test_eof16: cs = 16; goto _test_eof; _test_eof17: cs = 17; goto _test_eof; _test_eof18: cs = 18; goto _test_eof; _test_eof27: cs = 27; goto _test_eof; _test_eof19: cs = 19; goto _test_eof; _test_eof20: cs = 20; goto _test_eof; _test_eof21: cs = 21; goto _test_eof; _test_eof22: cs = 22; goto _test_eof; _test_eof23: cs = 23; goto _test_eof; _test_eof24: cs = 24; goto _test_eof; _test_eof25: cs = 25; goto _test_eof; _test_eof26: cs = 26; goto _test_eof; _test_eof: {} _out: {} } #line 182 "parser.rl" if (cs >= JSON_object_first_final) { if (json->create_additions) { VALUE klassname; if (NIL_P(json->object_class)) { klassname = rb_hash_aref(*result, json->create_id); } else { klassname = rb_funcall(*result, i_aref, 1, json->create_id); } if (!NIL_P(klassname)) { VALUE klass = rb_funcall(mJSON, i_deep_const_get, 1, klassname); if (RTEST(rb_funcall(klass, i_json_creatable_p, 0))) { *result = rb_funcall(klass, i_json_create, 1, *result); } } } return p + 1; } else { return NULL; } } #line 485 "parser.c" enum {JSON_value_start = 1}; enum {JSON_value_first_final = 29}; enum {JSON_value_error = 0}; enum {JSON_value_en_main = 1}; #line 282 "parser.rl" 
static char *JSON_parse_value(JSON_Parser *json, char *p, char *pe, VALUE *result, int current_nesting) { int cs = EVIL; #line 501 "parser.c" { cs = JSON_value_start; } #line 289 "parser.rl" #line 508 "parser.c" { if ( p == pe ) goto _test_eof; switch ( cs ) { st1: if ( ++p == pe ) goto _test_eof1; case 1: switch( (*p) ) { case 13: goto st1; case 32: goto st1; case 34: goto tr2; case 45: goto tr3; case 47: goto st6; case 73: goto st10; case 78: goto st17; case 91: goto tr7; case 102: goto st19; case 110: goto st23; case 116: goto st26; case 123: goto tr11; } if ( (*p) > 10 ) { if ( 48 <= (*p) && (*p) <= 57 ) goto tr3; } else if ( (*p) >= 9 ) goto st1; goto st0; st0: cs = 0; goto _out; tr2: #line 234 "parser.rl" { char *np = JSON_parse_string(json, p, pe, result); if (np == NULL) { p--; {p++; cs = 29; goto _out;} } else {p = (( np))-1;} } goto st29; tr3: #line 239 "parser.rl" { char *np; if(pe > p + 8 && !strncmp(MinusInfinity, p, 9)) { if (json->allow_nan) { *result = CMinusInfinity; {p = (( p + 10))-1;} p--; {p++; cs = 29; goto _out;} } else { rb_enc_raise(EXC_ENCODING eParserError, "%u: unexpected token at '%s'", __LINE__, p); } } np = JSON_parse_float(json, p, pe, result); if (np != NULL) {p = (( np))-1;} np = JSON_parse_integer(json, p, pe, result); if (np != NULL) {p = (( np))-1;} p--; {p++; cs = 29; goto _out;} } goto st29; tr7: #line 257 "parser.rl" { char *np; np = JSON_parse_array(json, p, pe, result, current_nesting + 1); if (np == NULL) { p--; {p++; cs = 29; goto _out;} } else {p = (( np))-1;} } goto st29; tr11: #line 263 "parser.rl" { char *np; np = JSON_parse_object(json, p, pe, result, current_nesting + 1); if (np == NULL) { p--; {p++; cs = 29; goto _out;} } else {p = (( np))-1;} } goto st29; tr25: #line 227 "parser.rl" { if (json->allow_nan) { *result = CInfinity; } else { rb_enc_raise(EXC_ENCODING eParserError, "%u: unexpected token at '%s'", __LINE__, p - 8); } } goto st29; tr27: #line 220 "parser.rl" { if (json->allow_nan) { *result = CNaN; } else 
{ rb_enc_raise(EXC_ENCODING eParserError, "%u: unexpected token at '%s'", __LINE__, p - 2); } } goto st29; tr31: #line 214 "parser.rl" { *result = Qfalse; } goto st29; tr34: #line 211 "parser.rl" { *result = Qnil; } goto st29; tr37: #line 217 "parser.rl" { *result = Qtrue; } goto st29; st29: if ( ++p == pe ) goto _test_eof29; case 29: #line 269 "parser.rl" { p--; {p++; cs = 29; goto _out;} } #line 628 "parser.c" switch( (*p) ) { case 13: goto st29; case 32: goto st29; case 47: goto st2; } if ( 9 <= (*p) && (*p) <= 10 ) goto st29; goto st0; st2: if ( ++p == pe ) goto _test_eof2; case 2: switch( (*p) ) { case 42: goto st3; case 47: goto st5; } goto st0; st3: if ( ++p == pe ) goto _test_eof3; case 3: if ( (*p) == 42 ) goto st4; goto st3; st4: if ( ++p == pe ) goto _test_eof4; case 4: switch( (*p) ) { case 42: goto st4; case 47: goto st29; } goto st3; st5: if ( ++p == pe ) goto _test_eof5; case 5: if ( (*p) == 10 ) goto st29; goto st5; st6: if ( ++p == pe ) goto _test_eof6; case 6: switch( (*p) ) { case 42: goto st7; case 47: goto st9; } goto st0; st7: if ( ++p == pe ) goto _test_eof7; case 7: if ( (*p) == 42 ) goto st8; goto st7; st8: if ( ++p == pe ) goto _test_eof8; case 8: switch( (*p) ) { case 42: goto st8; case 47: goto st1; } goto st7; st9: if ( ++p == pe ) goto _test_eof9; case 9: if ( (*p) == 10 ) goto st1; goto st9; st10: if ( ++p == pe ) goto _test_eof10; case 10: if ( (*p) == 110 ) goto st11; goto st0; st11: if ( ++p == pe ) goto _test_eof11; case 11: if ( (*p) == 102 ) goto st12; goto st0; st12: if ( ++p == pe ) goto _test_eof12; case 12: if ( (*p) == 105 ) goto st13; goto st0; st13: if ( ++p == pe ) goto _test_eof13; case 13: if ( (*p) == 110 ) goto st14; goto st0; st14: if ( ++p == pe ) goto _test_eof14; case 14: if ( (*p) == 105 ) goto st15; goto st0; st15: if ( ++p == pe ) goto _test_eof15; case 15: if ( (*p) == 116 ) goto st16; goto st0; st16: if ( ++p == pe ) goto _test_eof16; case 16: if ( (*p) == 121 ) goto tr25; goto st0; st17: if ( ++p == pe ) 
goto _test_eof17; case 17: if ( (*p) == 97 ) goto st18; goto st0; st18: if ( ++p == pe ) goto _test_eof18; case 18: if ( (*p) == 78 ) goto tr27; goto st0; st19: if ( ++p == pe ) goto _test_eof19; case 19: if ( (*p) == 97 ) goto st20; goto st0; st20: if ( ++p == pe ) goto _test_eof20; case 20: if ( (*p) == 108 ) goto st21; goto st0; st21: if ( ++p == pe ) goto _test_eof21; case 21: if ( (*p) == 115 ) goto st22; goto st0; st22: if ( ++p == pe ) goto _test_eof22; case 22: if ( (*p) == 101 ) goto tr31; goto st0; st23: if ( ++p == pe ) goto _test_eof23; case 23: if ( (*p) == 117 ) goto st24; goto st0; st24: if ( ++p == pe ) goto _test_eof24; case 24: if ( (*p) == 108 ) goto st25; goto st0; st25: if ( ++p == pe ) goto _test_eof25; case 25: if ( (*p) == 108 ) goto tr34; goto st0; st26: if ( ++p == pe ) goto _test_eof26; case 26: if ( (*p) == 114 ) goto st27; goto st0; st27: if ( ++p == pe ) goto _test_eof27; case 27: if ( (*p) == 117 ) goto st28; goto st0; st28: if ( ++p == pe ) goto _test_eof28; case 28: if ( (*p) == 101 ) goto tr37; goto st0; } _test_eof1: cs = 1; goto _test_eof; _test_eof29: cs = 29; goto _test_eof; _test_eof2: cs = 2; goto _test_eof; _test_eof3: cs = 3; goto _test_eof; _test_eof4: cs = 4; goto _test_eof; _test_eof5: cs = 5; goto _test_eof; _test_eof6: cs = 6; goto _test_eof; _test_eof7: cs = 7; goto _test_eof; _test_eof8: cs = 8; goto _test_eof; _test_eof9: cs = 9; goto _test_eof; _test_eof10: cs = 10; goto _test_eof; _test_eof11: cs = 11; goto _test_eof; _test_eof12: cs = 12; goto _test_eof; _test_eof13: cs = 13; goto _test_eof; _test_eof14: cs = 14; goto _test_eof; _test_eof15: cs = 15; goto _test_eof; _test_eof16: cs = 16; goto _test_eof; _test_eof17: cs = 17; goto _test_eof; _test_eof18: cs = 18; goto _test_eof; _test_eof19: cs = 19; goto _test_eof; _test_eof20: cs = 20; goto _test_eof; _test_eof21: cs = 21; goto _test_eof; _test_eof22: cs = 22; goto _test_eof; _test_eof23: cs = 23; goto _test_eof; _test_eof24: cs = 24; goto _test_eof; 
_test_eof25: cs = 25; goto _test_eof; _test_eof26: cs = 26; goto _test_eof; _test_eof27: cs = 27; goto _test_eof; _test_eof28: cs = 28; goto _test_eof; _test_eof: {} _out: {} } #line 290 "parser.rl" if (cs >= JSON_value_first_final) { return p; } else { return NULL; } } #line 879 "parser.c" enum {JSON_integer_start = 1}; enum {JSON_integer_first_final = 3}; enum {JSON_integer_error = 0}; enum {JSON_integer_en_main = 1}; #line 306 "parser.rl" static char *JSON_parse_integer(JSON_Parser *json, char *p, char *pe, VALUE *result) { int cs = EVIL; #line 895 "parser.c" { cs = JSON_integer_start; } #line 313 "parser.rl" json->memo = p; #line 903 "parser.c" { if ( p == pe ) goto _test_eof; switch ( cs ) { case 1: switch( (*p) ) { case 45: goto st2; case 48: goto st3; } if ( 49 <= (*p) && (*p) <= 57 ) goto st5; goto st0; st0: cs = 0; goto _out; st2: if ( ++p == pe ) goto _test_eof2; case 2: if ( (*p) == 48 ) goto st3; if ( 49 <= (*p) && (*p) <= 57 ) goto st5; goto st0; st3: if ( ++p == pe ) goto _test_eof3; case 3: if ( 48 <= (*p) && (*p) <= 57 ) goto st0; goto tr4; tr4: #line 303 "parser.rl" { p--; {p++; cs = 4; goto _out;} } goto st4; st4: if ( ++p == pe ) goto _test_eof4; case 4: #line 944 "parser.c" goto st0; st5: if ( ++p == pe ) goto _test_eof5; case 5: if ( 48 <= (*p) && (*p) <= 57 ) goto st5; goto tr4; } _test_eof2: cs = 2; goto _test_eof; _test_eof3: cs = 3; goto _test_eof; _test_eof4: cs = 4; goto _test_eof; _test_eof5: cs = 5; goto _test_eof; _test_eof: {} _out: {} } #line 315 "parser.rl" if (cs >= JSON_integer_first_final) { long len = p - json->memo; fbuffer_clear(json->fbuffer); fbuffer_append(json->fbuffer, json->memo, len); fbuffer_append_char(json->fbuffer, '\0'); *result = rb_cstr2inum(FBUFFER_PTR(json->fbuffer), 10); return p + 1; } else { return NULL; } } #line 978 "parser.c" enum {JSON_float_start = 1}; enum {JSON_float_first_final = 8}; enum {JSON_float_error = 0}; enum {JSON_float_en_main = 1}; #line 340 "parser.rl" static char 
*JSON_parse_float(JSON_Parser *json, char *p, char *pe, VALUE *result) { int cs = EVIL; #line 994 "parser.c" { cs = JSON_float_start; } #line 347 "parser.rl" json->memo = p; #line 1002 "parser.c" { if ( p == pe ) goto _test_eof; switch ( cs ) { case 1: switch( (*p) ) { case 45: goto st2; case 48: goto st3; } if ( 49 <= (*p) && (*p) <= 57 ) goto st7; goto st0; st0: cs = 0; goto _out; st2: if ( ++p == pe ) goto _test_eof2; case 2: if ( (*p) == 48 ) goto st3; if ( 49 <= (*p) && (*p) <= 57 ) goto st7; goto st0; st3: if ( ++p == pe ) goto _test_eof3; case 3: switch( (*p) ) { case 46: goto st4; case 69: goto st5; case 101: goto st5; } goto st0; st4: if ( ++p == pe ) goto _test_eof4; case 4: if ( 48 <= (*p) && (*p) <= 57 ) goto st8; goto st0; st8: if ( ++p == pe ) goto _test_eof8; case 8: switch( (*p) ) { case 69: goto st5; case 101: goto st5; } if ( (*p) > 46 ) { if ( 48 <= (*p) && (*p) <= 57 ) goto st8; } else if ( (*p) >= 45 ) goto st0; goto tr9; tr9: #line 334 "parser.rl" { p--; {p++; cs = 9; goto _out;} } goto st9; st9: if ( ++p == pe ) goto _test_eof9; case 9: #line 1067 "parser.c" goto st0; st5: if ( ++p == pe ) goto _test_eof5; case 5: switch( (*p) ) { case 43: goto st6; case 45: goto st6; } if ( 48 <= (*p) && (*p) <= 57 ) goto st10; goto st0; st6: if ( ++p == pe ) goto _test_eof6; case 6: if ( 48 <= (*p) && (*p) <= 57 ) goto st10; goto st0; st10: if ( ++p == pe ) goto _test_eof10; case 10: switch( (*p) ) { case 69: goto st0; case 101: goto st0; } if ( (*p) > 46 ) { if ( 48 <= (*p) && (*p) <= 57 ) goto st10; } else if ( (*p) >= 45 ) goto st0; goto tr9; st7: if ( ++p == pe ) goto _test_eof7; case 7: switch( (*p) ) { case 46: goto st4; case 69: goto st5; case 101: goto st5; } if ( 48 <= (*p) && (*p) <= 57 ) goto st7; goto st0; } _test_eof2: cs = 2; goto _test_eof; _test_eof3: cs = 3; goto _test_eof; _test_eof4: cs = 4; goto _test_eof; _test_eof8: cs = 8; goto _test_eof; _test_eof9: cs = 9; goto _test_eof; _test_eof5: cs = 5; goto _test_eof; _test_eof6: cs = 6; goto 
_test_eof; _test_eof10: cs = 10; goto _test_eof; _test_eof7: cs = 7; goto _test_eof; _test_eof: {} _out: {} } #line 349 "parser.rl" if (cs >= JSON_float_first_final) { long len = p - json->memo; fbuffer_clear(json->fbuffer); fbuffer_append(json->fbuffer, json->memo, len); fbuffer_append_char(json->fbuffer, '\0'); if (NIL_P(json->decimal_class)) { *result = rb_float_new(rb_cstr_to_dbl(FBUFFER_PTR(json->fbuffer), 1)); } else { VALUE text; text = rb_str_new2(FBUFFER_PTR(json->fbuffer)); *result = rb_funcall(json->decimal_class, i_new, 1, text); } return p + 1; } else { return NULL; } } #line 1150 "parser.c" enum {JSON_array_start = 1}; enum {JSON_array_first_final = 17}; enum {JSON_array_error = 0}; enum {JSON_array_en_main = 1}; #line 398 "parser.rl" static char *JSON_parse_array(JSON_Parser *json, char *p, char *pe, VALUE *result, int current_nesting) { int cs = EVIL; VALUE array_class = json->array_class; if (json->max_nesting && current_nesting > json->max_nesting) { rb_raise(eNestingError, "nesting of %d is too deep", current_nesting); } *result = NIL_P(array_class) ? 
rb_ary_new() : rb_class_new_instance(0, 0, array_class); #line 1172 "parser.c" { cs = JSON_array_start; } #line 411 "parser.rl" #line 1179 "parser.c" { if ( p == pe ) goto _test_eof; switch ( cs ) { case 1: if ( (*p) == 91 ) goto st2; goto st0; st0: cs = 0; goto _out; st2: if ( ++p == pe ) goto _test_eof2; case 2: switch( (*p) ) { case 13: goto st2; case 32: goto st2; case 34: goto tr2; case 45: goto tr2; case 47: goto st13; case 73: goto tr2; case 78: goto tr2; case 91: goto tr2; case 93: goto tr4; case 102: goto tr2; case 110: goto tr2; case 116: goto tr2; case 123: goto tr2; } if ( (*p) > 10 ) { if ( 48 <= (*p) && (*p) <= 57 ) goto tr2; } else if ( (*p) >= 9 ) goto st2; goto st0; tr2: #line 375 "parser.rl" { VALUE v = Qnil; char *np = JSON_parse_value(json, p, pe, &v, current_nesting); if (np == NULL) { p--; {p++; cs = 3; goto _out;} } else { if (NIL_P(json->array_class)) { rb_ary_push(*result, v); } else { rb_funcall(*result, i_leftshift, 1, v); } {p = (( np))-1;} } } goto st3; st3: if ( ++p == pe ) goto _test_eof3; case 3: #line 1238 "parser.c" switch( (*p) ) { case 13: goto st3; case 32: goto st3; case 44: goto st4; case 47: goto st9; case 93: goto tr4; } if ( 9 <= (*p) && (*p) <= 10 ) goto st3; goto st0; st4: if ( ++p == pe ) goto _test_eof4; case 4: switch( (*p) ) { case 13: goto st4; case 32: goto st4; case 34: goto tr2; case 45: goto tr2; case 47: goto st5; case 73: goto tr2; case 78: goto tr2; case 91: goto tr2; case 102: goto tr2; case 110: goto tr2; case 116: goto tr2; case 123: goto tr2; } if ( (*p) > 10 ) { if ( 48 <= (*p) && (*p) <= 57 ) goto tr2; } else if ( (*p) >= 9 ) goto st4; goto st0; st5: if ( ++p == pe ) goto _test_eof5; case 5: switch( (*p) ) { case 42: goto st6; case 47: goto st8; } goto st0; st6: if ( ++p == pe ) goto _test_eof6; case 6: if ( (*p) == 42 ) goto st7; goto st6; st7: if ( ++p == pe ) goto _test_eof7; case 7: switch( (*p) ) { case 42: goto st7; case 47: goto st4; } goto st6; st8: if ( ++p == pe ) goto _test_eof8; case 8: if ( 
(*p) == 10 ) goto st4; goto st8; st9: if ( ++p == pe ) goto _test_eof9; case 9: switch( (*p) ) { case 42: goto st10; case 47: goto st12; } goto st0; st10: if ( ++p == pe ) goto _test_eof10; case 10: if ( (*p) == 42 ) goto st11; goto st10; st11: if ( ++p == pe ) goto _test_eof11; case 11: switch( (*p) ) { case 42: goto st11; case 47: goto st3; } goto st10; st12: if ( ++p == pe ) goto _test_eof12; case 12: if ( (*p) == 10 ) goto st3; goto st12; tr4: #line 390 "parser.rl" { p--; {p++; cs = 17; goto _out;} } goto st17; st17: if ( ++p == pe ) goto _test_eof17; case 17: #line 1345 "parser.c" goto st0; st13: if ( ++p == pe ) goto _test_eof13; case 13: switch( (*p) ) { case 42: goto st14; case 47: goto st16; } goto st0; st14: if ( ++p == pe ) goto _test_eof14; case 14: if ( (*p) == 42 ) goto st15; goto st14; st15: if ( ++p == pe ) goto _test_eof15; case 15: switch( (*p) ) { case 42: goto st15; case 47: goto st2; } goto st14; st16: if ( ++p == pe ) goto _test_eof16; case 16: if ( (*p) == 10 ) goto st2; goto st16; } _test_eof2: cs = 2; goto _test_eof; _test_eof3: cs = 3; goto _test_eof; _test_eof4: cs = 4; goto _test_eof; _test_eof5: cs = 5; goto _test_eof; _test_eof6: cs = 6; goto _test_eof; _test_eof7: cs = 7; goto _test_eof; _test_eof8: cs = 8; goto _test_eof; _test_eof9: cs = 9; goto _test_eof; _test_eof10: cs = 10; goto _test_eof; _test_eof11: cs = 11; goto _test_eof; _test_eof12: cs = 12; goto _test_eof; _test_eof17: cs = 17; goto _test_eof; _test_eof13: cs = 13; goto _test_eof; _test_eof14: cs = 14; goto _test_eof; _test_eof15: cs = 15; goto _test_eof; _test_eof16: cs = 16; goto _test_eof; _test_eof: {} _out: {} } #line 412 "parser.rl" if(cs >= JSON_array_first_final) { return p + 1; } else { rb_enc_raise(EXC_ENCODING eParserError, "%u: unexpected token at '%s'", __LINE__, p); return NULL; } } static VALUE json_string_unescape(VALUE result, char *string, char *stringEnd) { char *p = string, *pe = string, *unescape; int unescape_len; char buf[4]; while (pe < stringEnd) 
{ if (*pe == '\\') { unescape = (char *) "?"; unescape_len = 1; if (pe > p) rb_str_buf_cat(result, p, pe - p); switch (*++pe) { case 'n': unescape = (char *) "\n"; break; case 'r': unescape = (char *) "\r"; break; case 't': unescape = (char *) "\t"; break; case '"': unescape = (char *) "\""; break; case '\\': unescape = (char *) "\\"; break; case 'b': unescape = (char *) "\b"; break; case 'f': unescape = (char *) "\f"; break; case 'u': if (pe > stringEnd - 4) { rb_enc_raise( EXC_ENCODING eParserError, "%u: incomplete unicode character escape sequence at '%s'", __LINE__, p ); } else { UTF32 ch = unescape_unicode((unsigned char *) ++pe); pe += 3; if (UNI_SUR_HIGH_START == (ch & 0xFC00)) { pe++; if (pe > stringEnd - 6) { rb_enc_raise( EXC_ENCODING eParserError, "%u: incomplete surrogate pair at '%s'", __LINE__, p ); } if (pe[0] == '\\' && pe[1] == 'u') { UTF32 sur = unescape_unicode((unsigned char *) pe + 2); ch = (((ch & 0x3F) << 10) | ((((ch >> 6) & 0xF) + 1) << 16) | (sur & 0x3FF)); pe += 5; } else { unescape = (char *) "?"; break; } } unescape_len = convert_UTF32_to_UTF8(buf, ch); unescape = buf; } break; default: p = pe; continue; } rb_str_buf_cat(result, unescape, unescape_len); p = ++pe; } else { pe++; } } rb_str_buf_cat(result, p, pe - p); return result; } #line 1490 "parser.c" enum {JSON_string_start = 1}; enum {JSON_string_first_final = 8}; enum {JSON_string_error = 0}; enum {JSON_string_en_main = 1}; #line 519 "parser.rl" static int match_i(VALUE regexp, VALUE klass, VALUE memo) { if (regexp == Qundef) return ST_STOP; if (RTEST(rb_funcall(klass, i_json_creatable_p, 0)) && RTEST(rb_funcall(regexp, i_match, 1, rb_ary_entry(memo, 0)))) { rb_ary_push(memo, klass); return ST_STOP; } return ST_CONTINUE; } static char *JSON_parse_string(JSON_Parser *json, char *p, char *pe, VALUE *result) { int cs = EVIL; VALUE match_string; *result = rb_str_buf_new(0); #line 1520 "parser.c" { cs = JSON_string_start; } #line 540 "parser.rl" json->memo = p; #line 1528 "parser.c" { 
if ( p == pe ) goto _test_eof; switch ( cs ) { case 1: if ( (*p) == 34 ) goto st2; goto st0; st0: cs = 0; goto _out; st2: if ( ++p == pe ) goto _test_eof2; case 2: switch( (*p) ) { case 34: goto tr2; case 92: goto st3; } if ( 0 <= (*p) && (*p) <= 31 ) goto st0; goto st2; tr2: #line 505 "parser.rl" { *result = json_string_unescape(*result, json->memo + 1, p); if (NIL_P(*result)) { p--; {p++; cs = 8; goto _out;} } else { FORCE_UTF8(*result); {p = (( p + 1))-1;} } } #line 516 "parser.rl" { p--; {p++; cs = 8; goto _out;} } goto st8; st8: if ( ++p == pe ) goto _test_eof8; case 8: #line 1571 "parser.c" goto st0; st3: if ( ++p == pe ) goto _test_eof3; case 3: if ( (*p) == 117 ) goto st4; if ( 0 <= (*p) && (*p) <= 31 ) goto st0; goto st2; st4: if ( ++p == pe ) goto _test_eof4; case 4: if ( (*p) < 65 ) { if ( 48 <= (*p) && (*p) <= 57 ) goto st5; } else if ( (*p) > 70 ) { if ( 97 <= (*p) && (*p) <= 102 ) goto st5; } else goto st5; goto st0; st5: if ( ++p == pe ) goto _test_eof5; case 5: if ( (*p) < 65 ) { if ( 48 <= (*p) && (*p) <= 57 ) goto st6; } else if ( (*p) > 70 ) { if ( 97 <= (*p) && (*p) <= 102 ) goto st6; } else goto st6; goto st0; st6: if ( ++p == pe ) goto _test_eof6; case 6: if ( (*p) < 65 ) { if ( 48 <= (*p) && (*p) <= 57 ) goto st7; } else if ( (*p) > 70 ) { if ( 97 <= (*p) && (*p) <= 102 ) goto st7; } else goto st7; goto st0; st7: if ( ++p == pe ) goto _test_eof7; case 7: if ( (*p) < 65 ) { if ( 48 <= (*p) && (*p) <= 57 ) goto st2; } else if ( (*p) > 70 ) { if ( 97 <= (*p) && (*p) <= 102 ) goto st2; } else goto st2; goto st0; } _test_eof2: cs = 2; goto _test_eof; _test_eof8: cs = 8; goto _test_eof; _test_eof3: cs = 3; goto _test_eof; _test_eof4: cs = 4; goto _test_eof; _test_eof5: cs = 5; goto _test_eof; _test_eof6: cs = 6; goto _test_eof; _test_eof7: cs = 7; goto _test_eof; _test_eof: {} _out: {} } #line 542 "parser.rl" if (json->create_additions && RTEST(match_string = json->match_string)) { VALUE klass; VALUE memo = rb_ary_new2(2); rb_ary_push(memo, 
*result); rb_hash_foreach(match_string, match_i, memo); klass = rb_ary_entry(memo, 1); if (RTEST(klass)) { *result = rb_funcall(klass, i_json_create, 1, *result); } } if (json->symbolize_names && json->parsing_name) { *result = rb_str_intern(*result); } else { rb_str_resize(*result, RSTRING_LEN(*result)); } if (cs >= JSON_string_first_final) { return p + 1; } else { return NULL; } } /* * Document-class: JSON::Ext::Parser * * This is the JSON parser implemented as a C extension. It can be configured * to be used by setting * * JSON.parser = JSON::Ext::Parser * * with the method parser= in JSON. * */ static VALUE convert_encoding(VALUE source) { #ifdef HAVE_RUBY_ENCODING_H rb_encoding *enc = rb_enc_get(source); if (enc == rb_ascii8bit_encoding()) { if (OBJ_FROZEN(source)) { source = rb_str_dup(source); } FORCE_UTF8(source); } else { source = rb_str_conv_enc(source, rb_enc_get(source), rb_utf8_encoding()); } #endif return source; } /* * call-seq: new(source, opts => {}) * * Creates a new JSON::Ext::Parser instance for the string _source_. * * Creates a new JSON::Ext::Parser instance for the string _source_. * * It will be configured by the _opts_ hash. _opts_ can have the following * keys: * * _opts_ can have the following keys: * * *max_nesting*: The maximum depth of nesting allowed in the parsed data * structures. Disable depth checking with :max_nesting => false|nil|0, it * defaults to 100. * * *allow_nan*: If set to true, allow NaN, Infinity and -Infinity in * defiance of RFC 4627 to be parsed by the Parser. This option defaults to * false. * * *symbolize_names*: If set to true, returns symbols for the names * (keys) in a JSON object. Otherwise strings are returned, which is * also the default. It's not possible to use this option in * conjunction with the *create_additions* option. * * *create_additions*: If set to false, the Parser doesn't create * additions even if a matching class and create_id was found. This option * defaults to false. 
* * *object_class*: Defaults to Hash * * *array_class*: Defaults to Array */ static VALUE cParser_initialize(int argc, VALUE *argv, VALUE self) { VALUE source, opts; GET_PARSER_INIT; if (json->Vsource) { rb_raise(rb_eTypeError, "already initialized instance"); } #ifdef HAVE_RB_SCAN_ARGS_OPTIONAL_HASH rb_scan_args(argc, argv, "1:", &source, &opts); #else rb_scan_args(argc, argv, "11", &source, &opts); #endif if (!NIL_P(opts)) { #ifndef HAVE_RB_SCAN_ARGS_OPTIONAL_HASH opts = rb_convert_type(opts, T_HASH, "Hash", "to_hash"); if (NIL_P(opts)) { rb_raise(rb_eArgError, "opts needs to be like a hash"); } else { #endif VALUE tmp = ID2SYM(i_max_nesting); if (option_given_p(opts, tmp)) { VALUE max_nesting = rb_hash_aref(opts, tmp); if (RTEST(max_nesting)) { Check_Type(max_nesting, T_FIXNUM); json->max_nesting = FIX2INT(max_nesting); } else { json->max_nesting = 0; } } else { json->max_nesting = 100; } tmp = ID2SYM(i_allow_nan); if (option_given_p(opts, tmp)) { json->allow_nan = RTEST(rb_hash_aref(opts, tmp)) ? 1 : 0; } else { json->allow_nan = 0; } tmp = ID2SYM(i_symbolize_names); if (option_given_p(opts, tmp)) { json->symbolize_names = RTEST(rb_hash_aref(opts, tmp)) ? 
1 : 0; } else { json->symbolize_names = 0; } tmp = ID2SYM(i_create_additions); if (option_given_p(opts, tmp)) { json->create_additions = RTEST(rb_hash_aref(opts, tmp)); } else { json->create_additions = 0; } if (json->symbolize_names && json->create_additions) { rb_raise(rb_eArgError, "options :symbolize_names and :create_additions cannot be " " used in conjunction"); } tmp = ID2SYM(i_create_id); if (option_given_p(opts, tmp)) { json->create_id = rb_hash_aref(opts, tmp); } else { json->create_id = rb_funcall(mJSON, i_create_id, 0); } tmp = ID2SYM(i_object_class); if (option_given_p(opts, tmp)) { json->object_class = rb_hash_aref(opts, tmp); } else { json->object_class = Qnil; } tmp = ID2SYM(i_array_class); if (option_given_p(opts, tmp)) { json->array_class = rb_hash_aref(opts, tmp); } else { json->array_class = Qnil; } tmp = ID2SYM(i_decimal_class); if (option_given_p(opts, tmp)) { json->decimal_class = rb_hash_aref(opts, tmp); } else { json->decimal_class = Qnil; } tmp = ID2SYM(i_match_string); if (option_given_p(opts, tmp)) { VALUE match_string = rb_hash_aref(opts, tmp); json->match_string = RTEST(match_string) ? match_string : Qnil; } else { json->match_string = Qnil; } #ifndef HAVE_RB_SCAN_ARGS_OPTIONAL_HASH } #endif } else { json->max_nesting = 100; json->allow_nan = 0; json->create_additions = 1; json->create_id = rb_funcall(mJSON, i_create_id, 0); json->object_class = Qnil; json->array_class = Qnil; json->decimal_class = Qnil; } source = convert_encoding(StringValue(source)); StringValue(source); json->len = RSTRING_LEN(source); json->source = RSTRING_PTR(source);; json->Vsource = source; return self; } #line 1834 "parser.c" enum {JSON_start = 1}; enum {JSON_first_final = 10}; enum {JSON_error = 0}; enum {JSON_en_main = 1}; #line 742 "parser.rl" /* * call-seq: parse() * * Parses the current JSON text _source_ and returns the complete data * structure as a result. 
*/ static VALUE cParser_parse(VALUE self) { char *p, *pe; int cs = EVIL; VALUE result = Qnil; GET_PARSER; #line 1859 "parser.c" { cs = JSON_start; } #line 758 "parser.rl" p = json->source; pe = p + json->len; #line 1868 "parser.c" { if ( p == pe ) goto _test_eof; switch ( cs ) { st1: if ( ++p == pe ) goto _test_eof1; case 1: switch( (*p) ) { case 13: goto st1; case 32: goto st1; case 34: goto tr2; case 45: goto tr2; case 47: goto st6; case 73: goto tr2; case 78: goto tr2; case 91: goto tr2; case 102: goto tr2; case 110: goto tr2; case 116: goto tr2; case 123: goto tr2; } if ( (*p) > 10 ) { if ( 48 <= (*p) && (*p) <= 57 ) goto tr2; } else if ( (*p) >= 9 ) goto st1; goto st0; st0: cs = 0; goto _out; tr2: #line 734 "parser.rl" { char *np = JSON_parse_value(json, p, pe, &result, 0); if (np == NULL) { p--; {p++; cs = 10; goto _out;} } else {p = (( np))-1;} } goto st10; st10: if ( ++p == pe ) goto _test_eof10; case 10: #line 1912 "parser.c" switch( (*p) ) { case 13: goto st10; case 32: goto st10; case 47: goto st2; } if ( 9 <= (*p) && (*p) <= 10 ) goto st10; goto st0; st2: if ( ++p == pe ) goto _test_eof2; case 2: switch( (*p) ) { case 42: goto st3; case 47: goto st5; } goto st0; st3: if ( ++p == pe ) goto _test_eof3; case 3: if ( (*p) == 42 ) goto st4; goto st3; st4: if ( ++p == pe ) goto _test_eof4; case 4: switch( (*p) ) { case 42: goto st4; case 47: goto st10; } goto st3; st5: if ( ++p == pe ) goto _test_eof5; case 5: if ( (*p) == 10 ) goto st10; goto st5; st6: if ( ++p == pe ) goto _test_eof6; case 6: switch( (*p) ) { case 42: goto st7; case 47: goto st9; } goto st0; st7: if ( ++p == pe ) goto _test_eof7; case 7: if ( (*p) == 42 ) goto st8; goto st7; st8: if ( ++p == pe ) goto _test_eof8; case 8: switch( (*p) ) { case 42: goto st8; case 47: goto st1; } goto st7; st9: if ( ++p == pe ) goto _test_eof9; case 9: if ( (*p) == 10 ) goto st1; goto st9; } _test_eof1: cs = 1; goto _test_eof; _test_eof10: cs = 10; goto _test_eof; _test_eof2: cs = 2; goto _test_eof; 
_test_eof3: cs = 3; goto _test_eof; _test_eof4: cs = 4; goto _test_eof; _test_eof5: cs = 5; goto _test_eof; _test_eof6: cs = 6; goto _test_eof; _test_eof7: cs = 7; goto _test_eof; _test_eof8: cs = 8; goto _test_eof; _test_eof9: cs = 9; goto _test_eof; _test_eof: {} _out: {} } #line 761 "parser.rl" if (cs >= JSON_first_final && p == pe) { return result; } else { rb_enc_raise(EXC_ENCODING eParserError, "%u: unexpected token at '%s'", __LINE__, p); return Qnil; } } static void JSON_mark(void *ptr) { JSON_Parser *json = ptr; rb_gc_mark_maybe(json->Vsource); rb_gc_mark_maybe(json->create_id); rb_gc_mark_maybe(json->object_class); rb_gc_mark_maybe(json->array_class); rb_gc_mark_maybe(json->decimal_class); rb_gc_mark_maybe(json->match_string); } static void JSON_free(void *ptr) { JSON_Parser *json = ptr; fbuffer_free(json->fbuffer); ruby_xfree(json); } static size_t JSON_memsize(const void *ptr) { const JSON_Parser *json = ptr; return sizeof(*json) + FBUFFER_CAPA(json->fbuffer); } #ifdef NEW_TYPEDDATA_WRAPPER static const rb_data_type_t JSON_Parser_type = { "JSON/Parser", {JSON_mark, JSON_free, JSON_memsize,}, #ifdef RUBY_TYPED_FREE_IMMEDIATELY 0, 0, RUBY_TYPED_FREE_IMMEDIATELY, #endif }; #endif static VALUE cJSON_parser_s_allocate(VALUE klass) { JSON_Parser *json; VALUE obj = TypedData_Make_Struct(klass, JSON_Parser, &JSON_Parser_type, json); json->fbuffer = fbuffer_alloc(0); return obj; } /* * call-seq: source() * * Returns a copy of the current _source_ string, that was used to construct * this Parser. 
*/ static VALUE cParser_source(VALUE self) { GET_PARSER; return rb_str_dup(json->Vsource); } void Init_parser(void) { rb_require("json/common"); mJSON = rb_define_module("JSON"); mExt = rb_define_module_under(mJSON, "Ext"); cParser = rb_define_class_under(mExt, "Parser", rb_cObject); eParserError = rb_path2class("JSON::ParserError"); eNestingError = rb_path2class("JSON::NestingError"); rb_define_alloc_func(cParser, cJSON_parser_s_allocate); rb_define_method(cParser, "initialize", cParser_initialize, -1); rb_define_method(cParser, "parse", cParser_parse, 0); rb_define_method(cParser, "source", cParser_source, 0); CNaN = rb_const_get(mJSON, rb_intern("NaN")); CInfinity = rb_const_get(mJSON, rb_intern("Infinity")); CMinusInfinity = rb_const_get(mJSON, rb_intern("MinusInfinity")); i_json_creatable_p = rb_intern("json_creatable?"); i_json_create = rb_intern("json_create"); i_create_id = rb_intern("create_id"); i_create_additions = rb_intern("create_additions"); i_chr = rb_intern("chr"); i_max_nesting = rb_intern("max_nesting"); i_allow_nan = rb_intern("allow_nan"); i_symbolize_names = rb_intern("symbolize_names"); i_object_class = rb_intern("object_class"); i_array_class = rb_intern("array_class"); i_decimal_class = rb_intern("decimal_class"); i_match = rb_intern("match"); i_match_string = rb_intern("match_string"); i_key_p = rb_intern("key?"); i_deep_const_get = rb_intern("deep_const_get"); i_aset = rb_intern("[]="); i_aref = rb_intern("[]"); i_leftshift = rb_intern("<<"); i_new = rb_intern("new"); } /* * Local variables: * mode: c * c-file-style: ruby * indent-tabs-mode: nil * End: */ ruby-json-2.1.0+dfsg.orig/ext/json/ext/parser/parser.h0000644000175000017500000000567513113111601022211 0ustar boutilboutil#ifndef _PARSER_H_ #define _PARSER_H_ #include "ruby.h" #ifndef HAVE_RUBY_RE_H #include "re.h" #endif #ifdef HAVE_RUBY_ST_H #include "ruby/st.h" #else #include "st.h" #endif #define option_given_p(opts, key) RTEST(rb_funcall(opts, i_key_p, 1, key)) /* unicode */ 
typedef unsigned long UTF32; /* at least 32 bits */ typedef unsigned short UTF16; /* at least 16 bits */ typedef unsigned char UTF8; /* typically 8 bits */ #define UNI_REPLACEMENT_CHAR (UTF32)0x0000FFFD #define UNI_SUR_HIGH_START (UTF32)0xD800 #define UNI_SUR_HIGH_END (UTF32)0xDBFF #define UNI_SUR_LOW_START (UTF32)0xDC00 #define UNI_SUR_LOW_END (UTF32)0xDFFF typedef struct JSON_ParserStruct { VALUE Vsource; char *source; long len; char *memo; VALUE create_id; int max_nesting; int allow_nan; int parsing_name; int symbolize_names; VALUE object_class; VALUE array_class; VALUE decimal_class; int create_additions; VALUE match_string; FBuffer *fbuffer; } JSON_Parser; #define GET_PARSER \ GET_PARSER_INIT; \ if (!json->Vsource) rb_raise(rb_eTypeError, "uninitialized instance") #define GET_PARSER_INIT \ JSON_Parser *json; \ TypedData_Get_Struct(self, JSON_Parser, &JSON_Parser_type, json) #define MinusInfinity "-Infinity" #define EVIL 0x666 static UTF32 unescape_unicode(const unsigned char *p); static int convert_UTF32_to_UTF8(char *buf, UTF32 ch); static char *JSON_parse_object(JSON_Parser *json, char *p, char *pe, VALUE *result, int current_nesting); static char *JSON_parse_value(JSON_Parser *json, char *p, char *pe, VALUE *result, int current_nesting); static char *JSON_parse_integer(JSON_Parser *json, char *p, char *pe, VALUE *result); static char *JSON_parse_float(JSON_Parser *json, char *p, char *pe, VALUE *result); static char *JSON_parse_array(JSON_Parser *json, char *p, char *pe, VALUE *result, int current_nesting); static VALUE json_string_unescape(VALUE result, char *string, char *stringEnd); static char *JSON_parse_string(JSON_Parser *json, char *p, char *pe, VALUE *result); static VALUE convert_encoding(VALUE source); static VALUE cParser_initialize(int argc, VALUE *argv, VALUE self); static VALUE cParser_parse(VALUE self); static void JSON_mark(void *json); static void JSON_free(void *json); static VALUE cJSON_parser_s_allocate(VALUE klass); static VALUE 
cParser_source(VALUE self); #ifndef ZALLOC #define ZALLOC(type) ((type *)ruby_zalloc(sizeof(type))) static inline void *ruby_zalloc(size_t n) { void *p = ruby_xmalloc(n); memset(p, 0, n); return p; } #endif #ifdef TypedData_Make_Struct static const rb_data_type_t JSON_Parser_type; #define NEW_TYPEDDATA_WRAPPER 1 #else #define TypedData_Make_Struct(klass, type, ignore, json) Data_Make_Struct(klass, type, NULL, JSON_free, json) #define TypedData_Get_Struct(self, JSON_Parser, ignore, json) Data_Get_Struct(self, JSON_Parser, json) #endif #endif ruby-json-2.1.0+dfsg.orig/ext/json/ext/generator/0000755000175000017500000000000013113111601021221 5ustar boutilboutilruby-json-2.1.0+dfsg.orig/ext/json/ext/generator/depend0000644000175000017500000000010413113111601022376 0ustar boutilboutilgenerator.o: generator.c generator.h $(srcdir)/../fbuffer/fbuffer.h ruby-json-2.1.0+dfsg.orig/ext/json/ext/generator/generator.c0000644000175000017500000013627413113111601023370 0ustar boutilboutil#include "../fbuffer/fbuffer.h" #include "generator.h" #ifdef HAVE_RUBY_ENCODING_H static VALUE CEncoding_UTF_8; static ID i_encoding, i_encode; #endif static VALUE mJSON, mExt, mGenerator, cState, mGeneratorMethods, mObject, mHash, mArray, #ifdef RUBY_INTEGER_UNIFICATION mInteger, #else mFixnum, mBignum, #endif mFloat, mString, mString_Extend, mTrueClass, mFalseClass, mNilClass, eGeneratorError, eNestingError, CRegexp_MULTILINE, CJSON_SAFE_STATE_PROTOTYPE, i_SAFE_STATE_PROTOTYPE; static ID i_to_s, i_to_json, i_new, i_indent, i_space, i_space_before, i_object_nl, i_array_nl, i_max_nesting, i_allow_nan, i_ascii_only, i_pack, i_unpack, i_create_id, i_extend, i_key_p, i_aref, i_send, i_respond_to_p, i_match, i_keys, i_depth, i_buffer_initial_length, i_dup; /* * Copyright 2001-2004 Unicode, Inc. * * Disclaimer * * This source code is provided as is by Unicode, Inc. No claims are * made as to fitness for any particular purpose. No warranties of any * kind are expressed or implied. 
The recipient agrees to determine * applicability of information provided. If this file has been * purchased on magnetic or optical media from Unicode, Inc., the * sole remedy for any claim will be exchange of defective media * within 90 days of receipt. * * Limitations on Rights to Redistribute This Code * * Unicode, Inc. hereby grants the right to freely use the information * supplied in this file in the creation of products supporting the * Unicode Standard, and to make copies of this file in any form * for internal or external distribution as long as this notice * remains attached. */ /* * Index into the table below with the first byte of a UTF-8 sequence to * get the number of trailing bytes that are supposed to follow it. * Note that *legal* UTF-8 values can't have 4 or 5-bytes. The table is * left as-is for anyone who may want to do such conversion, which was * allowed in earlier algorithms. */ static const char trailingBytesForUTF8[256] = { 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2, 3,3,3,3,3,3,3,3,4,4,4,4,5,5,5,5 }; /* * Magic values subtracted from a buffer value during UTF8 conversion. * This table contains as many values as there might be trailing bytes * in a UTF-8 sequence. */ static const UTF32 offsetsFromUTF8[6] = { 0x00000000UL, 0x00003080UL, 0x000E2080UL, 0x03C82080UL, 0xFA082080UL, 0x82082080UL }; /* * Utility routine to tell whether a sequence of bytes is legal UTF-8. * This must be called with the length pre-determined by the first byte. 
* If not calling this from ConvertUTF8to*, then the length can be set by: * length = trailingBytesForUTF8[*source]+1; * and the sequence is illegal right away if there aren't that many bytes * available. * If presented with a length > 4, this returns 0. The Unicode * definition of UTF-8 goes up to 4-byte sequences. */ static unsigned char isLegalUTF8(const UTF8 *source, unsigned long length) { UTF8 a; const UTF8 *srcptr = source+length; switch (length) { default: return 0; /* Everything else falls through when "1"... */ case 4: if ((a = (*--srcptr)) < 0x80 || a > 0xBF) return 0; case 3: if ((a = (*--srcptr)) < 0x80 || a > 0xBF) return 0; case 2: if ((a = (*--srcptr)) > 0xBF) return 0; switch (*source) { /* no fall-through in this inner switch */ case 0xE0: if (a < 0xA0) return 0; break; case 0xED: if (a > 0x9F) return 0; break; case 0xF0: if (a < 0x90) return 0; break; case 0xF4: if (a > 0x8F) return 0; break; default: if (a < 0x80) return 0; } case 1: if (*source >= 0x80 && *source < 0xC2) return 0; } if (*source > 0xF4) return 0; return 1; } /* Escapes the UTF16 character and stores the result in the buffer buf. */ static void unicode_escape(char *buf, UTF16 character) { const char *digits = "0123456789abcdef"; buf[2] = digits[character >> 12]; buf[3] = digits[(character >> 8) & 0xf]; buf[4] = digits[(character >> 4) & 0xf]; buf[5] = digits[character & 0xf]; } /* Escapes the UTF16 character and stores the result in the buffer buf, then * the buffer buf is appended to the FBuffer buffer. */ static void unicode_escape_to_buffer(FBuffer *buffer, char buf[6], UTF16 character) { unicode_escape(buf, character); fbuffer_append(buffer, buf, 6); } /* Converts string to a JSON string in FBuffer buffer, where all but the ASCII * and control characters are JSON escaped. 
*/ static void convert_UTF8_to_JSON_ASCII(FBuffer *buffer, VALUE string) { const UTF8 *source = (UTF8 *) RSTRING_PTR(string); const UTF8 *sourceEnd = source + RSTRING_LEN(string); char buf[6] = { '\\', 'u' }; while (source < sourceEnd) { UTF32 ch = 0; unsigned short extraBytesToRead = trailingBytesForUTF8[*source]; if (source + extraBytesToRead >= sourceEnd) { rb_raise(rb_path2class("JSON::GeneratorError"), "partial character in source, but hit end"); } if (!isLegalUTF8(source, extraBytesToRead+1)) { rb_raise(rb_path2class("JSON::GeneratorError"), "source sequence is illegal/malformed utf-8"); } /* * The cases all fall through. See "Note A" below. */ switch (extraBytesToRead) { case 5: ch += *source++; ch <<= 6; /* remember, illegal UTF-8 */ case 4: ch += *source++; ch <<= 6; /* remember, illegal UTF-8 */ case 3: ch += *source++; ch <<= 6; case 2: ch += *source++; ch <<= 6; case 1: ch += *source++; ch <<= 6; case 0: ch += *source++; } ch -= offsetsFromUTF8[extraBytesToRead]; if (ch <= UNI_MAX_BMP) { /* Target is a character <= 0xFFFF */ /* UTF-16 surrogate values are illegal in UTF-32 */ if (ch >= UNI_SUR_HIGH_START && ch <= UNI_SUR_LOW_END) { #if UNI_STRICT_CONVERSION source -= (extraBytesToRead+1); /* return to the illegal value itself */ rb_raise(rb_path2class("JSON::GeneratorError"), "source sequence is illegal/malformed utf-8"); #else unicode_escape_to_buffer(buffer, buf, UNI_REPLACEMENT_CHAR); #endif } else { /* normal case */ if (ch >= 0x20 && ch <= 0x7f) { switch (ch) { case '\\': fbuffer_append(buffer, "\\\\", 2); break; case '"': fbuffer_append(buffer, "\\\"", 2); break; default: fbuffer_append_char(buffer, (char)ch); break; } } else { switch (ch) { case '\n': fbuffer_append(buffer, "\\n", 2); break; case '\r': fbuffer_append(buffer, "\\r", 2); break; case '\t': fbuffer_append(buffer, "\\t", 2); break; case '\f': fbuffer_append(buffer, "\\f", 2); break; case '\b': fbuffer_append(buffer, "\\b", 2); break; default: unicode_escape_to_buffer(buffer, buf, 
(UTF16) ch); break; } } } } else if (ch > UNI_MAX_UTF16) { #if UNI_STRICT_CONVERSION source -= (extraBytesToRead+1); /* return to the start */ rb_raise(rb_path2class("JSON::GeneratorError"), "source sequence is illegal/malformed utf8"); #else unicode_escape_to_buffer(buffer, buf, UNI_REPLACEMENT_CHAR); #endif } else { /* target is a character in range 0xFFFF - 0x10FFFF. */ ch -= halfBase; unicode_escape_to_buffer(buffer, buf, (UTF16)((ch >> halfShift) + UNI_SUR_HIGH_START)); unicode_escape_to_buffer(buffer, buf, (UTF16)((ch & halfMask) + UNI_SUR_LOW_START)); } } RB_GC_GUARD(string); } /* Converts string to a JSON string in FBuffer buffer, where only the * characters required by the JSON standard are JSON escaped. The remaining * characters (should be UTF8) are just passed through and appended to the * result. */ static void convert_UTF8_to_JSON(FBuffer *buffer, VALUE string) { const char *ptr = RSTRING_PTR(string), *p; unsigned long len = RSTRING_LEN(string), start = 0, end = 0; const char *escape = NULL; int escape_len; unsigned char c; char buf[6] = { '\\', 'u' }; for (start = 0, end = 0; end < len;) { p = ptr + end; c = (unsigned char) *p; if (c < 0x20) { switch (c) { case '\n': escape = "\\n"; escape_len = 2; break; case '\r': escape = "\\r"; escape_len = 2; break; case '\t': escape = "\\t"; escape_len = 2; break; case '\f': escape = "\\f"; escape_len = 2; break; case '\b': escape = "\\b"; escape_len = 2; break; default: unicode_escape(buf, (UTF16) *p); escape = buf; escape_len = 6; break; } } else { switch (c) { case '\\': escape = "\\\\"; escape_len = 2; break; case '"': escape = "\\\""; escape_len = 2; break; default: { unsigned short clen = trailingBytesForUTF8[c] + 1; if (end + clen > len) { rb_raise(rb_path2class("JSON::GeneratorError"), "partial character in source, but hit end"); } if (!isLegalUTF8((UTF8 *) p, clen)) { rb_raise(rb_path2class("JSON::GeneratorError"), "source sequence is illegal/malformed utf-8"); } end += clen; } continue; break; } } 
fbuffer_append(buffer, ptr + start, end - start); fbuffer_append(buffer, escape, escape_len); start = ++end; escape = NULL; } fbuffer_append(buffer, ptr + start, end - start); } static char *fstrndup(const char *ptr, unsigned long len) { char *result; if (len <= 0) return NULL; result = ALLOC_N(char, len); memcpy(result, ptr, len); return result; } /* * Document-module: JSON::Ext::Generator * * This is the JSON generator implemented as a C extension. It can be * configured to be used by setting * * JSON.generator = JSON::Ext::Generator * * with the method generator= in JSON. * */ /* * call-seq: to_json(state = nil) * * Returns a JSON string containing a JSON object, that is generated from * this Hash instance. * _state_ is a JSON::State object, that can also be used to configure the * produced JSON string output further. */ static VALUE mHash_to_json(int argc, VALUE *argv, VALUE self) { GENERATE_JSON(object); } /* * call-seq: to_json(state = nil) * * Returns a JSON string containing a JSON array, that is generated from * this Array instance. * _state_ is a JSON::State object, that can also be used to configure the * produced JSON string output further. */ static VALUE mArray_to_json(int argc, VALUE *argv, VALUE self) { GENERATE_JSON(array); } #ifdef RUBY_INTEGER_UNIFICATION /* * call-seq: to_json(*) * * Returns a JSON string representation for this Integer number. */ static VALUE mInteger_to_json(int argc, VALUE *argv, VALUE self) { GENERATE_JSON(integer); } #else /* * call-seq: to_json(*) * * Returns a JSON string representation for this Integer number. */ static VALUE mFixnum_to_json(int argc, VALUE *argv, VALUE self) { GENERATE_JSON(fixnum); } /* * call-seq: to_json(*) * * Returns a JSON string representation for this Integer number. */ static VALUE mBignum_to_json(int argc, VALUE *argv, VALUE self) { GENERATE_JSON(bignum); } #endif /* * call-seq: to_json(*) * * Returns a JSON string representation for this Float number. 
*/ static VALUE mFloat_to_json(int argc, VALUE *argv, VALUE self) { GENERATE_JSON(float); } /* * call-seq: String.included(modul) * * Extends _modul_ with the String::Extend module. */ static VALUE mString_included_s(VALUE self, VALUE modul) { VALUE result = rb_funcall(modul, i_extend, 1, mString_Extend); return result; } /* * call-seq: to_json(*) * * This string should be encoded with UTF-8 A call to this method * returns a JSON string encoded with UTF16 big endian characters as * \u????. */ static VALUE mString_to_json(int argc, VALUE *argv, VALUE self) { GENERATE_JSON(string); } /* * call-seq: to_json_raw_object() * * This method creates a raw object hash, that can be nested into * other data structures and will be generated as a raw string. This * method should be used, if you want to convert raw strings to JSON * instead of UTF-8 strings, e. g. binary data. */ static VALUE mString_to_json_raw_object(VALUE self) { VALUE ary; VALUE result = rb_hash_new(); rb_hash_aset(result, rb_funcall(mJSON, i_create_id, 0), rb_class_name(rb_obj_class(self))); ary = rb_funcall(self, i_unpack, 1, rb_str_new2("C*")); rb_hash_aset(result, rb_str_new2("raw"), ary); return result; } /* * call-seq: to_json_raw(*args) * * This method creates a JSON text from the result of a call to * to_json_raw_object of this String. */ static VALUE mString_to_json_raw(int argc, VALUE *argv, VALUE self) { VALUE obj = mString_to_json_raw_object(self); Check_Type(obj, T_HASH); return mHash_to_json(argc, argv, obj); } /* * call-seq: json_create(o) * * Raw Strings are JSON Objects (the raw bytes are stored in an array for the * key "raw"). The Ruby String can be created by this module method. */ static VALUE mString_Extend_json_create(VALUE self, VALUE o) { VALUE ary; Check_Type(o, T_HASH); ary = rb_hash_aref(o, rb_str_new2("raw")); return rb_funcall(ary, i_pack, 1, rb_str_new2("C*")); } /* * call-seq: to_json(*) * * Returns a JSON string for true: 'true'. 
*/ static VALUE mTrueClass_to_json(int argc, VALUE *argv, VALUE self) { GENERATE_JSON(true); } /* * call-seq: to_json(*) * * Returns a JSON string for false: 'false'. */ static VALUE mFalseClass_to_json(int argc, VALUE *argv, VALUE self) { GENERATE_JSON(false); } /* * call-seq: to_json(*) * * Returns a JSON string for nil: 'null'. */ static VALUE mNilClass_to_json(int argc, VALUE *argv, VALUE self) { GENERATE_JSON(null); } /* * call-seq: to_json(*) * * Converts this object to a string (calling #to_s), converts * it to a JSON string, and returns the result. This is a fallback, if no * special method #to_json was defined for some object. */ static VALUE mObject_to_json(int argc, VALUE *argv, VALUE self) { VALUE state; VALUE string = rb_funcall(self, i_to_s, 0); rb_scan_args(argc, argv, "01", &state); Check_Type(string, T_STRING); state = cState_from_state_s(cState, state); return cState_partial_generate(state, string); } static void State_free(void *ptr) { JSON_Generator_State *state = ptr; if (state->indent) ruby_xfree(state->indent); if (state->space) ruby_xfree(state->space); if (state->space_before) ruby_xfree(state->space_before); if (state->object_nl) ruby_xfree(state->object_nl); if (state->array_nl) ruby_xfree(state->array_nl); if (state->array_delim) fbuffer_free(state->array_delim); if (state->object_delim) fbuffer_free(state->object_delim); if (state->object_delim2) fbuffer_free(state->object_delim2); ruby_xfree(state); } static size_t State_memsize(const void *ptr) { const JSON_Generator_State *state = ptr; size_t size = sizeof(*state); if (state->indent) size += state->indent_len + 1; if (state->space) size += state->space_len + 1; if (state->space_before) size += state->space_before_len + 1; if (state->object_nl) size += state->object_nl_len + 1; if (state->array_nl) size += state->array_nl_len + 1; if (state->array_delim) size += FBUFFER_CAPA(state->array_delim); if (state->object_delim) size += FBUFFER_CAPA(state->object_delim); if 
(state->object_delim2) size += FBUFFER_CAPA(state->object_delim2); return size; } #ifdef NEW_TYPEDDATA_WRAPPER static const rb_data_type_t JSON_Generator_State_type = { "JSON/Generator/State", {NULL, State_free, State_memsize,}, #ifdef RUBY_TYPED_FREE_IMMEDIATELY 0, 0, RUBY_TYPED_FREE_IMMEDIATELY, #endif }; #endif static VALUE cState_s_allocate(VALUE klass) { JSON_Generator_State *state; return TypedData_Make_Struct(klass, JSON_Generator_State, &JSON_Generator_State_type, state); } /* * call-seq: configure(opts) * * Configure this State instance with the Hash _opts_, and return * itself. */ static VALUE cState_configure(VALUE self, VALUE opts) { VALUE tmp; GET_STATE(self); tmp = rb_check_convert_type(opts, T_HASH, "Hash", "to_hash"); if (NIL_P(tmp)) tmp = rb_convert_type(opts, T_HASH, "Hash", "to_h"); opts = tmp; tmp = rb_hash_aref(opts, ID2SYM(i_indent)); if (RTEST(tmp)) { unsigned long len; Check_Type(tmp, T_STRING); len = RSTRING_LEN(tmp); state->indent = fstrndup(RSTRING_PTR(tmp), len + 1); state->indent_len = len; } tmp = rb_hash_aref(opts, ID2SYM(i_space)); if (RTEST(tmp)) { unsigned long len; Check_Type(tmp, T_STRING); len = RSTRING_LEN(tmp); state->space = fstrndup(RSTRING_PTR(tmp), len + 1); state->space_len = len; } tmp = rb_hash_aref(opts, ID2SYM(i_space_before)); if (RTEST(tmp)) { unsigned long len; Check_Type(tmp, T_STRING); len = RSTRING_LEN(tmp); state->space_before = fstrndup(RSTRING_PTR(tmp), len + 1); state->space_before_len = len; } tmp = rb_hash_aref(opts, ID2SYM(i_array_nl)); if (RTEST(tmp)) { unsigned long len; Check_Type(tmp, T_STRING); len = RSTRING_LEN(tmp); state->array_nl = fstrndup(RSTRING_PTR(tmp), len + 1); state->array_nl_len = len; } tmp = rb_hash_aref(opts, ID2SYM(i_object_nl)); if (RTEST(tmp)) { unsigned long len; Check_Type(tmp, T_STRING); len = RSTRING_LEN(tmp); state->object_nl = fstrndup(RSTRING_PTR(tmp), len + 1); state->object_nl_len = len; } tmp = ID2SYM(i_max_nesting); state->max_nesting = 100; if (option_given_p(opts, 
tmp)) { VALUE max_nesting = rb_hash_aref(opts, tmp); if (RTEST(max_nesting)) { Check_Type(max_nesting, T_FIXNUM); state->max_nesting = FIX2LONG(max_nesting); } else { state->max_nesting = 0; } } tmp = ID2SYM(i_depth); state->depth = 0; if (option_given_p(opts, tmp)) { VALUE depth = rb_hash_aref(opts, tmp); if (RTEST(depth)) { Check_Type(depth, T_FIXNUM); state->depth = FIX2LONG(depth); } else { state->depth = 0; } } tmp = ID2SYM(i_buffer_initial_length); if (option_given_p(opts, tmp)) { VALUE buffer_initial_length = rb_hash_aref(opts, tmp); if (RTEST(buffer_initial_length)) { long initial_length; Check_Type(buffer_initial_length, T_FIXNUM); initial_length = FIX2LONG(buffer_initial_length); if (initial_length > 0) state->buffer_initial_length = initial_length; } } tmp = rb_hash_aref(opts, ID2SYM(i_allow_nan)); state->allow_nan = RTEST(tmp); tmp = rb_hash_aref(opts, ID2SYM(i_ascii_only)); state->ascii_only = RTEST(tmp); return self; } static void set_state_ivars(VALUE hash, VALUE state) { VALUE ivars = rb_obj_instance_variables(state); int i = 0; for (i = 0; i < RARRAY_LEN(ivars); i++) { VALUE key = rb_funcall(rb_ary_entry(ivars, i), i_to_s, 0); long key_len = RSTRING_LEN(key); VALUE value = rb_iv_get(state, StringValueCStr(key)); rb_hash_aset(hash, rb_str_intern(rb_str_substr(key, 1, key_len - 1)), value); } } /* * call-seq: to_h * * Returns the configuration instance variables as a hash, that can be * passed to the configure method. 
*/ static VALUE cState_to_h(VALUE self) { VALUE result = rb_hash_new(); GET_STATE(self); set_state_ivars(result, self); rb_hash_aset(result, ID2SYM(i_indent), rb_str_new(state->indent, state->indent_len)); rb_hash_aset(result, ID2SYM(i_space), rb_str_new(state->space, state->space_len)); rb_hash_aset(result, ID2SYM(i_space_before), rb_str_new(state->space_before, state->space_before_len)); rb_hash_aset(result, ID2SYM(i_object_nl), rb_str_new(state->object_nl, state->object_nl_len)); rb_hash_aset(result, ID2SYM(i_array_nl), rb_str_new(state->array_nl, state->array_nl_len)); rb_hash_aset(result, ID2SYM(i_allow_nan), state->allow_nan ? Qtrue : Qfalse); rb_hash_aset(result, ID2SYM(i_ascii_only), state->ascii_only ? Qtrue : Qfalse); rb_hash_aset(result, ID2SYM(i_max_nesting), LONG2FIX(state->max_nesting)); rb_hash_aset(result, ID2SYM(i_depth), LONG2FIX(state->depth)); rb_hash_aset(result, ID2SYM(i_buffer_initial_length), LONG2FIX(state->buffer_initial_length)); return result; } /* * call-seq: [](name) * * Returns the value returned by method +name+. */ static VALUE cState_aref(VALUE self, VALUE name) { name = rb_funcall(name, i_to_s, 0); if (RTEST(rb_funcall(self, i_respond_to_p, 1, name))) { return rb_funcall(self, i_send, 1, name); } else { return rb_ivar_get(self, rb_intern_str(rb_str_concat(rb_str_new2("@"), name))); } } /* * call-seq: []=(name, value) * * Sets the attribute name to value. 
*/ static VALUE cState_aset(VALUE self, VALUE name, VALUE value) { VALUE name_writer; name = rb_funcall(name, i_to_s, 0); name_writer = rb_str_cat2(rb_str_dup(name), "="); if (RTEST(rb_funcall(self, i_respond_to_p, 1, name_writer))) { return rb_funcall(self, i_send, 2, name_writer, value); } else { rb_ivar_set(self, rb_intern_str(rb_str_concat(rb_str_new2("@"), name)), value); } return Qnil; } static void generate_json_object(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj) { char *object_nl = state->object_nl; long object_nl_len = state->object_nl_len; char *indent = state->indent; long indent_len = state->indent_len; long max_nesting = state->max_nesting; char *delim = FBUFFER_PTR(state->object_delim); long delim_len = FBUFFER_LEN(state->object_delim); char *delim2 = FBUFFER_PTR(state->object_delim2); long delim2_len = FBUFFER_LEN(state->object_delim2); long depth = ++state->depth; int i, j; VALUE key, key_to_s, keys; if (max_nesting != 0 && depth > max_nesting) { fbuffer_free(buffer); rb_raise(eNestingError, "nesting of %ld is too deep", --state->depth); } fbuffer_append_char(buffer, '{'); keys = rb_funcall(obj, i_keys, 0); for(i = 0; i < RARRAY_LEN(keys); i++) { if (i > 0) fbuffer_append(buffer, delim, delim_len); if (object_nl) { fbuffer_append(buffer, object_nl, object_nl_len); } if (indent) { for (j = 0; j < depth; j++) { fbuffer_append(buffer, indent, indent_len); } } key = rb_ary_entry(keys, i); key_to_s = rb_funcall(key, i_to_s, 0); Check_Type(key_to_s, T_STRING); generate_json(buffer, Vstate, state, key_to_s); fbuffer_append(buffer, delim2, delim2_len); generate_json(buffer, Vstate, state, rb_hash_aref(obj, key)); } depth = --state->depth; if (object_nl) { fbuffer_append(buffer, object_nl, object_nl_len); if (indent) { for (j = 0; j < depth; j++) { fbuffer_append(buffer, indent, indent_len); } } } fbuffer_append_char(buffer, '}'); } static void generate_json_array(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE 
obj) { char *array_nl = state->array_nl; long array_nl_len = state->array_nl_len; char *indent = state->indent; long indent_len = state->indent_len; long max_nesting = state->max_nesting; char *delim = FBUFFER_PTR(state->array_delim); long delim_len = FBUFFER_LEN(state->array_delim); long depth = ++state->depth; int i, j; if (max_nesting != 0 && depth > max_nesting) { fbuffer_free(buffer); rb_raise(eNestingError, "nesting of %ld is too deep", --state->depth); } fbuffer_append_char(buffer, '['); if (array_nl) fbuffer_append(buffer, array_nl, array_nl_len); for(i = 0; i < RARRAY_LEN(obj); i++) { if (i > 0) fbuffer_append(buffer, delim, delim_len); if (indent) { for (j = 0; j < depth; j++) { fbuffer_append(buffer, indent, indent_len); } } generate_json(buffer, Vstate, state, rb_ary_entry(obj, i)); } state->depth = --depth; if (array_nl) { fbuffer_append(buffer, array_nl, array_nl_len); if (indent) { for (j = 0; j < depth; j++) { fbuffer_append(buffer, indent, indent_len); } } } fbuffer_append_char(buffer, ']'); } static void generate_json_string(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj) { fbuffer_append_char(buffer, '"'); #ifdef HAVE_RUBY_ENCODING_H obj = rb_funcall(obj, i_encode, 1, CEncoding_UTF_8); #endif if (state->ascii_only) { convert_UTF8_to_JSON_ASCII(buffer, obj); } else { convert_UTF8_to_JSON(buffer, obj); } fbuffer_append_char(buffer, '"'); } static void generate_json_null(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj) { fbuffer_append(buffer, "null", 4); } static void generate_json_false(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj) { fbuffer_append(buffer, "false", 5); } static void generate_json_true(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj) { fbuffer_append(buffer, "true", 4); } static void generate_json_fixnum(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj) { fbuffer_append_long(buffer, FIX2LONG(obj)); } static void 
generate_json_bignum(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj) { VALUE tmp = rb_funcall(obj, i_to_s, 0); fbuffer_append_str(buffer, tmp); } #ifdef RUBY_INTEGER_UNIFICATION static void generate_json_integer(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj) { if (FIXNUM_P(obj)) generate_json_fixnum(buffer, Vstate, state, obj); else generate_json_bignum(buffer, Vstate, state, obj); } #endif static void generate_json_float(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj) { double value = RFLOAT_VALUE(obj); char allow_nan = state->allow_nan; VALUE tmp = rb_funcall(obj, i_to_s, 0); if (!allow_nan) { if (isinf(value)) { fbuffer_free(buffer); rb_raise(eGeneratorError, "%u: %"PRIsVALUE" not allowed in JSON", __LINE__, RB_OBJ_STRING(tmp)); } else if (isnan(value)) { fbuffer_free(buffer); rb_raise(eGeneratorError, "%u: %"PRIsVALUE" not allowed in JSON", __LINE__, RB_OBJ_STRING(tmp)); } } fbuffer_append_str(buffer, tmp); } static void generate_json(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj) { VALUE tmp; VALUE klass = CLASS_OF(obj); if (klass == rb_cHash) { generate_json_object(buffer, Vstate, state, obj); } else if (klass == rb_cArray) { generate_json_array(buffer, Vstate, state, obj); } else if (klass == rb_cString) { generate_json_string(buffer, Vstate, state, obj); } else if (obj == Qnil) { generate_json_null(buffer, Vstate, state, obj); } else if (obj == Qfalse) { generate_json_false(buffer, Vstate, state, obj); } else if (obj == Qtrue) { generate_json_true(buffer, Vstate, state, obj); } else if (FIXNUM_P(obj)) { generate_json_fixnum(buffer, Vstate, state, obj); } else if (RB_TYPE_P(obj, T_BIGNUM)) { generate_json_bignum(buffer, Vstate, state, obj); } else if (klass == rb_cFloat) { generate_json_float(buffer, Vstate, state, obj); } else if (rb_respond_to(obj, i_to_json)) { tmp = rb_funcall(obj, i_to_json, 1, Vstate); Check_Type(tmp, T_STRING); fbuffer_append_str(buffer, 
tmp); } else { tmp = rb_funcall(obj, i_to_s, 0); Check_Type(tmp, T_STRING); generate_json_string(buffer, Vstate, state, tmp); } } static FBuffer *cState_prepare_buffer(VALUE self) { FBuffer *buffer; GET_STATE(self); buffer = fbuffer_alloc(state->buffer_initial_length); if (state->object_delim) { fbuffer_clear(state->object_delim); } else { state->object_delim = fbuffer_alloc(16); } fbuffer_append_char(state->object_delim, ','); if (state->object_delim2) { fbuffer_clear(state->object_delim2); } else { state->object_delim2 = fbuffer_alloc(16); } if (state->space_before) fbuffer_append(state->object_delim2, state->space_before, state->space_before_len); fbuffer_append_char(state->object_delim2, ':'); if (state->space) fbuffer_append(state->object_delim2, state->space, state->space_len); if (state->array_delim) { fbuffer_clear(state->array_delim); } else { state->array_delim = fbuffer_alloc(16); } fbuffer_append_char(state->array_delim, ','); if (state->array_nl) fbuffer_append(state->array_delim, state->array_nl, state->array_nl_len); return buffer; } static VALUE cState_partial_generate(VALUE self, VALUE obj) { FBuffer *buffer = cState_prepare_buffer(self); GET_STATE(self); generate_json(buffer, self, state, obj); return fbuffer_to_s(buffer); } /* * call-seq: generate(obj) * * Generates a valid JSON document from object +obj+ and returns the * result. If no valid JSON document can be created this method raises a * GeneratorError exception. */ static VALUE cState_generate(VALUE self, VALUE obj) { VALUE result = cState_partial_generate(self, obj); GET_STATE(self); (void)state; return result; } /* * call-seq: new(opts = {}) * * Instantiates a new State object, configured by _opts_. 
* * _opts_ can have the following keys: * * * *indent*: a string used to indent levels (default: ''), * * *space*: a string that is put after, a : or , delimiter (default: ''), * * *space_before*: a string that is put before a : pair delimiter (default: ''), * * *object_nl*: a string that is put at the end of a JSON object (default: ''), * * *array_nl*: a string that is put at the end of a JSON array (default: ''), * * *allow_nan*: true if NaN, Infinity, and -Infinity should be * generated, otherwise an exception is thrown, if these values are * encountered. This options defaults to false. * * *buffer_initial_length*: sets the initial length of the generator's * internal buffer. */ static VALUE cState_initialize(int argc, VALUE *argv, VALUE self) { VALUE opts; GET_STATE(self); state->max_nesting = 100; state->buffer_initial_length = FBUFFER_INITIAL_LENGTH_DEFAULT; rb_scan_args(argc, argv, "01", &opts); if (!NIL_P(opts)) cState_configure(self, opts); return self; } /* * call-seq: initialize_copy(orig) * * Initializes this object from orig if it can be duplicated/cloned and returns * it. 
*/ static VALUE cState_init_copy(VALUE obj, VALUE orig) { JSON_Generator_State *objState, *origState; if (obj == orig) return obj; GET_STATE_TO(obj, objState); GET_STATE_TO(orig, origState); if (!objState) rb_raise(rb_eArgError, "unallocated JSON::State"); MEMCPY(objState, origState, JSON_Generator_State, 1); objState->indent = fstrndup(origState->indent, origState->indent_len); objState->space = fstrndup(origState->space, origState->space_len); objState->space_before = fstrndup(origState->space_before, origState->space_before_len); objState->object_nl = fstrndup(origState->object_nl, origState->object_nl_len); objState->array_nl = fstrndup(origState->array_nl, origState->array_nl_len); if (origState->array_delim) objState->array_delim = fbuffer_dup(origState->array_delim); if (origState->object_delim) objState->object_delim = fbuffer_dup(origState->object_delim); if (origState->object_delim2) objState->object_delim2 = fbuffer_dup(origState->object_delim2); return obj; } /* * call-seq: from_state(opts) * * Creates a State object from _opts_, which ought to be Hash to create a * new State instance configured by _opts_, something else to create an * unconfigured instance. If _opts_ is a State object, it is just returned. */ static VALUE cState_from_state_s(VALUE self, VALUE opts) { if (rb_obj_is_kind_of(opts, self)) { return opts; } else if (rb_obj_is_kind_of(opts, rb_cHash)) { return rb_funcall(self, i_new, 1, opts); } else { if (NIL_P(CJSON_SAFE_STATE_PROTOTYPE)) { CJSON_SAFE_STATE_PROTOTYPE = rb_const_get(mJSON, i_SAFE_STATE_PROTOTYPE); } return rb_funcall(CJSON_SAFE_STATE_PROTOTYPE, i_dup, 0); } } /* * call-seq: indent() * * Returns the string that is used to indent levels in the JSON text. */ static VALUE cState_indent(VALUE self) { GET_STATE(self); return state->indent ? rb_str_new(state->indent, state->indent_len) : rb_str_new2(""); } /* * call-seq: indent=(indent) * * Sets the string that is used to indent levels in the JSON text. 
*/ static VALUE cState_indent_set(VALUE self, VALUE indent) { unsigned long len; GET_STATE(self); Check_Type(indent, T_STRING); len = RSTRING_LEN(indent); if (len == 0) { if (state->indent) { ruby_xfree(state->indent); state->indent = NULL; state->indent_len = 0; } } else { if (state->indent) ruby_xfree(state->indent); state->indent = fstrndup(RSTRING_PTR(indent), len); state->indent_len = len; } return Qnil; } /* * call-seq: space() * * Returns the string that is used to insert a space between the tokens in a JSON * string. */ static VALUE cState_space(VALUE self) { GET_STATE(self); return state->space ? rb_str_new(state->space, state->space_len) : rb_str_new2(""); } /* * call-seq: space=(space) * * Sets _space_ to the string that is used to insert a space between the tokens in a JSON * string. */ static VALUE cState_space_set(VALUE self, VALUE space) { unsigned long len; GET_STATE(self); Check_Type(space, T_STRING); len = RSTRING_LEN(space); if (len == 0) { if (state->space) { ruby_xfree(state->space); state->space = NULL; state->space_len = 0; } } else { if (state->space) ruby_xfree(state->space); state->space = fstrndup(RSTRING_PTR(space), len); state->space_len = len; } return Qnil; } /* * call-seq: space_before() * * Returns the string that is used to insert a space before the ':' in JSON objects. */ static VALUE cState_space_before(VALUE self) { GET_STATE(self); return state->space_before ? rb_str_new(state->space_before, state->space_before_len) : rb_str_new2(""); } /* * call-seq: space_before=(space_before) * * Sets the string that is used to insert a space before the ':' in JSON objects. 
*/ static VALUE cState_space_before_set(VALUE self, VALUE space_before) { unsigned long len; GET_STATE(self); Check_Type(space_before, T_STRING); len = RSTRING_LEN(space_before); if (len == 0) { if (state->space_before) { ruby_xfree(state->space_before); state->space_before = NULL; state->space_before_len = 0; } } else { if (state->space_before) ruby_xfree(state->space_before); state->space_before = fstrndup(RSTRING_PTR(space_before), len); state->space_before_len = len; } return Qnil; } /* * call-seq: object_nl() * * This string is put at the end of a line that holds a JSON object (or * Hash). */ static VALUE cState_object_nl(VALUE self) { GET_STATE(self); return state->object_nl ? rb_str_new(state->object_nl, state->object_nl_len) : rb_str_new2(""); } /* * call-seq: object_nl=(object_nl) * * This string is put at the end of a line that holds a JSON object (or * Hash). */ static VALUE cState_object_nl_set(VALUE self, VALUE object_nl) { unsigned long len; GET_STATE(self); Check_Type(object_nl, T_STRING); len = RSTRING_LEN(object_nl); if (len == 0) { if (state->object_nl) { ruby_xfree(state->object_nl); state->object_nl = NULL; } } else { if (state->object_nl) ruby_xfree(state->object_nl); state->object_nl = fstrndup(RSTRING_PTR(object_nl), len); state->object_nl_len = len; } return Qnil; } /* * call-seq: array_nl() * * This string is put at the end of a line that holds a JSON array. */ static VALUE cState_array_nl(VALUE self) { GET_STATE(self); return state->array_nl ? rb_str_new(state->array_nl, state->array_nl_len) : rb_str_new2(""); } /* * call-seq: array_nl=(array_nl) * * This string is put at the end of a line that holds a JSON array. 
*/ static VALUE cState_array_nl_set(VALUE self, VALUE array_nl) { unsigned long len; GET_STATE(self); Check_Type(array_nl, T_STRING); len = RSTRING_LEN(array_nl); if (len == 0) { if (state->array_nl) { ruby_xfree(state->array_nl); state->array_nl = NULL; } } else { if (state->array_nl) ruby_xfree(state->array_nl); state->array_nl = fstrndup(RSTRING_PTR(array_nl), len); state->array_nl_len = len; } return Qnil; } /* * call-seq: check_circular? * * Returns true, if circular data structures should be checked, * otherwise returns false. */ static VALUE cState_check_circular_p(VALUE self) { GET_STATE(self); return state->max_nesting ? Qtrue : Qfalse; } /* * call-seq: max_nesting * * This integer returns the maximum level of data structure nesting in * the generated JSON, max_nesting = 0 if no maximum is checked. */ static VALUE cState_max_nesting(VALUE self) { GET_STATE(self); return LONG2FIX(state->max_nesting); } /* * call-seq: max_nesting=(depth) * * This sets the maximum level of data structure nesting in the generated JSON * to the integer depth, max_nesting = 0 if no maximum should be checked. */ static VALUE cState_max_nesting_set(VALUE self, VALUE depth) { GET_STATE(self); Check_Type(depth, T_FIXNUM); return state->max_nesting = FIX2LONG(depth); } /* * call-seq: allow_nan? * * Returns true, if NaN, Infinity, and -Infinity should be generated, otherwise * returns false. */ static VALUE cState_allow_nan_p(VALUE self) { GET_STATE(self); return state->allow_nan ? Qtrue : Qfalse; } /* * call-seq: ascii_only? * * Returns true, if NaN, Infinity, and -Infinity should be generated, otherwise * returns false. */ static VALUE cState_ascii_only_p(VALUE self) { GET_STATE(self); return state->ascii_only ? Qtrue : Qfalse; } /* * call-seq: depth * * This integer returns the current depth of data structure nesting. 
*/ static VALUE cState_depth(VALUE self) { GET_STATE(self); return LONG2FIX(state->depth); } /* * call-seq: depth=(depth) * * This sets the maximum level of data structure nesting in the generated JSON * to the integer depth, max_nesting = 0 if no maximum should be checked. */ static VALUE cState_depth_set(VALUE self, VALUE depth) { GET_STATE(self); Check_Type(depth, T_FIXNUM); state->depth = FIX2LONG(depth); return Qnil; } /* * call-seq: buffer_initial_length * * This integer returns the current initial length of the buffer. */ static VALUE cState_buffer_initial_length(VALUE self) { GET_STATE(self); return LONG2FIX(state->buffer_initial_length); } /* * call-seq: buffer_initial_length=(length) * * This sets the initial length of the buffer to +length+, if +length+ > 0, * otherwise its value isn't changed. */ static VALUE cState_buffer_initial_length_set(VALUE self, VALUE buffer_initial_length) { long initial_length; GET_STATE(self); Check_Type(buffer_initial_length, T_FIXNUM); initial_length = FIX2LONG(buffer_initial_length); if (initial_length > 0) { state->buffer_initial_length = initial_length; } return Qnil; } /* * */ void Init_generator(void) { rb_require("json/common"); mJSON = rb_define_module("JSON"); mExt = rb_define_module_under(mJSON, "Ext"); mGenerator = rb_define_module_under(mExt, "Generator"); eGeneratorError = rb_path2class("JSON::GeneratorError"); eNestingError = rb_path2class("JSON::NestingError"); cState = rb_define_class_under(mGenerator, "State", rb_cObject); rb_define_alloc_func(cState, cState_s_allocate); rb_define_singleton_method(cState, "from_state", cState_from_state_s, 1); rb_define_method(cState, "initialize", cState_initialize, -1); rb_define_method(cState, "initialize_copy", cState_init_copy, 1); rb_define_method(cState, "indent", cState_indent, 0); rb_define_method(cState, "indent=", cState_indent_set, 1); rb_define_method(cState, "space", cState_space, 0); rb_define_method(cState, "space=", cState_space_set, 1); 
rb_define_method(cState, "space_before", cState_space_before, 0); rb_define_method(cState, "space_before=", cState_space_before_set, 1); rb_define_method(cState, "object_nl", cState_object_nl, 0); rb_define_method(cState, "object_nl=", cState_object_nl_set, 1); rb_define_method(cState, "array_nl", cState_array_nl, 0); rb_define_method(cState, "array_nl=", cState_array_nl_set, 1); rb_define_method(cState, "max_nesting", cState_max_nesting, 0); rb_define_method(cState, "max_nesting=", cState_max_nesting_set, 1); rb_define_method(cState, "check_circular?", cState_check_circular_p, 0); rb_define_method(cState, "allow_nan?", cState_allow_nan_p, 0); rb_define_method(cState, "ascii_only?", cState_ascii_only_p, 0); rb_define_method(cState, "depth", cState_depth, 0); rb_define_method(cState, "depth=", cState_depth_set, 1); rb_define_method(cState, "buffer_initial_length", cState_buffer_initial_length, 0); rb_define_method(cState, "buffer_initial_length=", cState_buffer_initial_length_set, 1); rb_define_method(cState, "configure", cState_configure, 1); rb_define_alias(cState, "merge", "configure"); rb_define_method(cState, "to_h", cState_to_h, 0); rb_define_alias(cState, "to_hash", "to_h"); rb_define_method(cState, "[]", cState_aref, 1); rb_define_method(cState, "[]=", cState_aset, 2); rb_define_method(cState, "generate", cState_generate, 1); mGeneratorMethods = rb_define_module_under(mGenerator, "GeneratorMethods"); mObject = rb_define_module_under(mGeneratorMethods, "Object"); rb_define_method(mObject, "to_json", mObject_to_json, -1); mHash = rb_define_module_under(mGeneratorMethods, "Hash"); rb_define_method(mHash, "to_json", mHash_to_json, -1); mArray = rb_define_module_under(mGeneratorMethods, "Array"); rb_define_method(mArray, "to_json", mArray_to_json, -1); #ifdef RUBY_INTEGER_UNIFICATION mInteger = rb_define_module_under(mGeneratorMethods, "Integer"); rb_define_method(mInteger, "to_json", mInteger_to_json, -1); #else mFixnum = 
rb_define_module_under(mGeneratorMethods, "Fixnum"); rb_define_method(mFixnum, "to_json", mFixnum_to_json, -1); mBignum = rb_define_module_under(mGeneratorMethods, "Bignum"); rb_define_method(mBignum, "to_json", mBignum_to_json, -1); #endif mFloat = rb_define_module_under(mGeneratorMethods, "Float"); rb_define_method(mFloat, "to_json", mFloat_to_json, -1); mString = rb_define_module_under(mGeneratorMethods, "String"); rb_define_singleton_method(mString, "included", mString_included_s, 1); rb_define_method(mString, "to_json", mString_to_json, -1); rb_define_method(mString, "to_json_raw", mString_to_json_raw, -1); rb_define_method(mString, "to_json_raw_object", mString_to_json_raw_object, 0); mString_Extend = rb_define_module_under(mString, "Extend"); rb_define_method(mString_Extend, "json_create", mString_Extend_json_create, 1); mTrueClass = rb_define_module_under(mGeneratorMethods, "TrueClass"); rb_define_method(mTrueClass, "to_json", mTrueClass_to_json, -1); mFalseClass = rb_define_module_under(mGeneratorMethods, "FalseClass"); rb_define_method(mFalseClass, "to_json", mFalseClass_to_json, -1); mNilClass = rb_define_module_under(mGeneratorMethods, "NilClass"); rb_define_method(mNilClass, "to_json", mNilClass_to_json, -1); CRegexp_MULTILINE = rb_const_get(rb_cRegexp, rb_intern("MULTILINE")); i_to_s = rb_intern("to_s"); i_to_json = rb_intern("to_json"); i_new = rb_intern("new"); i_indent = rb_intern("indent"); i_space = rb_intern("space"); i_space_before = rb_intern("space_before"); i_object_nl = rb_intern("object_nl"); i_array_nl = rb_intern("array_nl"); i_max_nesting = rb_intern("max_nesting"); i_allow_nan = rb_intern("allow_nan"); i_ascii_only = rb_intern("ascii_only"); i_depth = rb_intern("depth"); i_buffer_initial_length = rb_intern("buffer_initial_length"); i_pack = rb_intern("pack"); i_unpack = rb_intern("unpack"); i_create_id = rb_intern("create_id"); i_extend = rb_intern("extend"); i_key_p = rb_intern("key?"); i_aref = rb_intern("[]"); i_send = 
rb_intern("__send__"); i_respond_to_p = rb_intern("respond_to?"); i_match = rb_intern("match"); i_keys = rb_intern("keys"); i_dup = rb_intern("dup"); #ifdef HAVE_RUBY_ENCODING_H CEncoding_UTF_8 = rb_funcall(rb_path2class("Encoding"), rb_intern("find"), 1, rb_str_new2("utf-8")); i_encoding = rb_intern("encoding"); i_encode = rb_intern("encode"); #endif i_SAFE_STATE_PROTOTYPE = rb_intern("SAFE_STATE_PROTOTYPE"); CJSON_SAFE_STATE_PROTOTYPE = Qnil; } ruby-json-2.1.0+dfsg.orig/ext/json/ext/generator/generator.h0000644000175000017500000001642313113111601023366 0ustar boutilboutil#ifndef _GENERATOR_H_ #define _GENERATOR_H_ #include #include #include "ruby.h" #ifdef HAVE_RUBY_RE_H #include "ruby/re.h" #else #include "re.h" #endif #ifndef rb_intern_str #define rb_intern_str(string) SYM2ID(rb_str_intern(string)) #endif #ifndef rb_obj_instance_variables #define rb_obj_instance_variables(object) rb_funcall(object, rb_intern("instance_variables"), 0) #endif #define option_given_p(opts, key) RTEST(rb_funcall(opts, i_key_p, 1, key)) /* unicode definitions */ #define UNI_STRICT_CONVERSION 1 typedef unsigned long UTF32; /* at least 32 bits */ typedef unsigned short UTF16; /* at least 16 bits */ typedef unsigned char UTF8; /* typically 8 bits */ #define UNI_REPLACEMENT_CHAR (UTF32)0x0000FFFD #define UNI_MAX_BMP (UTF32)0x0000FFFF #define UNI_MAX_UTF16 (UTF32)0x0010FFFF #define UNI_MAX_UTF32 (UTF32)0x7FFFFFFF #define UNI_MAX_LEGAL_UTF32 (UTF32)0x0010FFFF #define UNI_SUR_HIGH_START (UTF32)0xD800 #define UNI_SUR_HIGH_END (UTF32)0xDBFF #define UNI_SUR_LOW_START (UTF32)0xDC00 #define UNI_SUR_LOW_END (UTF32)0xDFFF static const int halfShift = 10; /* used for shifting by 10 bits */ static const UTF32 halfBase = 0x0010000UL; static const UTF32 halfMask = 0x3FFUL; static unsigned char isLegalUTF8(const UTF8 *source, unsigned long length); static void unicode_escape(char *buf, UTF16 character); static void unicode_escape_to_buffer(FBuffer *buffer, char buf[6], UTF16 character); static void 
convert_UTF8_to_JSON_ASCII(FBuffer *buffer, VALUE string); static void convert_UTF8_to_JSON(FBuffer *buffer, VALUE string); static char *fstrndup(const char *ptr, unsigned long len); /* ruby api and some helpers */ typedef struct JSON_Generator_StateStruct { char *indent; long indent_len; char *space; long space_len; char *space_before; long space_before_len; char *object_nl; long object_nl_len; char *array_nl; long array_nl_len; FBuffer *array_delim; FBuffer *object_delim; FBuffer *object_delim2; long max_nesting; char allow_nan; char ascii_only; long depth; long buffer_initial_length; } JSON_Generator_State; #define GET_STATE_TO(self, state) \ TypedData_Get_Struct(self, JSON_Generator_State, &JSON_Generator_State_type, state) #define GET_STATE(self) \ JSON_Generator_State *state; \ GET_STATE_TO(self, state) #define GENERATE_JSON(type) \ FBuffer *buffer; \ VALUE Vstate; \ JSON_Generator_State *state; \ \ rb_scan_args(argc, argv, "01", &Vstate); \ Vstate = cState_from_state_s(cState, Vstate); \ TypedData_Get_Struct(Vstate, JSON_Generator_State, &JSON_Generator_State_type, state); \ buffer = cState_prepare_buffer(Vstate); \ generate_json_##type(buffer, Vstate, state, self); \ return fbuffer_to_s(buffer) static VALUE mHash_to_json(int argc, VALUE *argv, VALUE self); static VALUE mArray_to_json(int argc, VALUE *argv, VALUE self); #ifdef RUBY_INTEGER_UNIFICATION static VALUE mInteger_to_json(int argc, VALUE *argv, VALUE self); #else static VALUE mFixnum_to_json(int argc, VALUE *argv, VALUE self); static VALUE mBignum_to_json(int argc, VALUE *argv, VALUE self); #endif static VALUE mFloat_to_json(int argc, VALUE *argv, VALUE self); static VALUE mString_included_s(VALUE self, VALUE modul); static VALUE mString_to_json(int argc, VALUE *argv, VALUE self); static VALUE mString_to_json_raw_object(VALUE self); static VALUE mString_to_json_raw(int argc, VALUE *argv, VALUE self); static VALUE mString_Extend_json_create(VALUE self, VALUE o); static VALUE mTrueClass_to_json(int 
argc, VALUE *argv, VALUE self); static VALUE mFalseClass_to_json(int argc, VALUE *argv, VALUE self); static VALUE mNilClass_to_json(int argc, VALUE *argv, VALUE self); static VALUE mObject_to_json(int argc, VALUE *argv, VALUE self); static void State_free(void *state); static VALUE cState_s_allocate(VALUE klass); static VALUE cState_configure(VALUE self, VALUE opts); static VALUE cState_to_h(VALUE self); static void generate_json(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj); static void generate_json_object(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj); static void generate_json_array(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj); static void generate_json_string(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj); static void generate_json_null(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj); static void generate_json_false(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj); static void generate_json_true(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj); #ifdef RUBY_INTEGER_UNIFICATION static void generate_json_integer(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj); #endif static void generate_json_fixnum(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj); static void generate_json_bignum(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj); static void generate_json_float(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj); static VALUE cState_partial_generate(VALUE self, VALUE obj); static VALUE cState_generate(VALUE self, VALUE obj); static VALUE cState_initialize(int argc, VALUE *argv, VALUE self); static VALUE cState_from_state_s(VALUE self, VALUE opts); static VALUE cState_indent(VALUE self); static VALUE cState_indent_set(VALUE self, VALUE indent); static VALUE cState_space(VALUE self); static VALUE 
cState_space_set(VALUE self, VALUE space); static VALUE cState_space_before(VALUE self); static VALUE cState_space_before_set(VALUE self, VALUE space_before); static VALUE cState_object_nl(VALUE self); static VALUE cState_object_nl_set(VALUE self, VALUE object_nl); static VALUE cState_array_nl(VALUE self); static VALUE cState_array_nl_set(VALUE self, VALUE array_nl); static VALUE cState_max_nesting(VALUE self); static VALUE cState_max_nesting_set(VALUE self, VALUE depth); static VALUE cState_allow_nan_p(VALUE self); static VALUE cState_ascii_only_p(VALUE self); static VALUE cState_depth(VALUE self); static VALUE cState_depth_set(VALUE self, VALUE depth); static FBuffer *cState_prepare_buffer(VALUE self); #ifndef ZALLOC #define ZALLOC(type) ((type *)ruby_zalloc(sizeof(type))) static inline void *ruby_zalloc(size_t n) { void *p = ruby_xmalloc(n); memset(p, 0, n); return p; } #endif #ifdef TypedData_Make_Struct static const rb_data_type_t JSON_Generator_State_type; #define NEW_TYPEDDATA_WRAPPER 1 #else #define TypedData_Make_Struct(klass, type, ignore, json) Data_Make_Struct(klass, type, NULL, State_free, json) #define TypedData_Get_Struct(self, JSON_Generator_State, ignore, json) Data_Get_Struct(self, JSON_Generator_State, json) #endif #endif ruby-json-2.1.0+dfsg.orig/ext/json/ext/generator/extconf.rb0000644000175000017500000000012113113111601023206 0ustar boutilboutilrequire 'mkmf' $defs << "-DJSON_GENERATOR" create_makefile 'json/ext/generator' ruby-json-2.1.0+dfsg.orig/ext/json/extconf.rb0000644000175000017500000000004713113111601020427 0ustar boutilboutilrequire 'mkmf' create_makefile('json') ruby-json-2.1.0+dfsg.orig/json_pure.gemspec0000644000175000017500000001217613113111601020242 0ustar boutilboutil# -*- encoding: utf-8 -*- # stub: json_pure 2.1.0 ruby lib Gem::Specification.new do |s| s.name = "json_pure".freeze s.version = "2.1.0" s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? 
:required_rubygems_version= s.require_paths = ["lib".freeze] s.authors = ["Florian Frank".freeze] s.date = "2017-04-18" s.description = "This is a JSON implementation in pure Ruby.".freeze s.email = "flori@ping.de".freeze s.extra_rdoc_files = ["README.md".freeze] s.files = ["./tests/test_helper.rb".freeze, ".gitignore".freeze, ".travis.yml".freeze, "CHANGES.md".freeze, "Gemfile".freeze, "README-json-jruby.md".freeze, "README.md".freeze, "Rakefile".freeze, "VERSION".freeze, "data/example.json".freeze, "data/index.html".freeze, "data/prototype.js".freeze, "diagrams/.keep".freeze, "ext/json/ext/fbuffer/fbuffer.h".freeze, "ext/json/ext/generator/depend".freeze, "ext/json/ext/generator/extconf.rb".freeze, "ext/json/ext/generator/generator.c".freeze, "ext/json/ext/generator/generator.h".freeze, "ext/json/ext/parser/depend".freeze, "ext/json/ext/parser/extconf.rb".freeze, "ext/json/ext/parser/parser.c".freeze, "ext/json/ext/parser/parser.h".freeze, "ext/json/ext/parser/parser.rl".freeze, "ext/json/extconf.rb".freeze, "install.rb".freeze, "java/src/json/ext/ByteListTranscoder.java".freeze, "java/src/json/ext/Generator.java".freeze, "java/src/json/ext/GeneratorMethods.java".freeze, "java/src/json/ext/GeneratorService.java".freeze, "java/src/json/ext/GeneratorState.java".freeze, "java/src/json/ext/OptionsReader.java".freeze, "java/src/json/ext/Parser.java".freeze, "java/src/json/ext/Parser.rl".freeze, "java/src/json/ext/ParserService.java".freeze, "java/src/json/ext/RuntimeInfo.java".freeze, "java/src/json/ext/StringDecoder.java".freeze, "java/src/json/ext/StringEncoder.java".freeze, "java/src/json/ext/Utils.java".freeze, "json-java.gemspec".freeze, "json.gemspec".freeze, "json_pure.gemspec".freeze, "lib/json.rb".freeze, "lib/json/add/bigdecimal.rb".freeze, "lib/json/add/complex.rb".freeze, "lib/json/add/core.rb".freeze, "lib/json/add/date.rb".freeze, "lib/json/add/date_time.rb".freeze, "lib/json/add/exception.rb".freeze, "lib/json/add/ostruct.rb".freeze, 
"lib/json/add/range.rb".freeze, "lib/json/add/rational.rb".freeze, "lib/json/add/regexp.rb".freeze, "lib/json/add/struct.rb".freeze, "lib/json/add/symbol.rb".freeze, "lib/json/add/time.rb".freeze, "lib/json/common.rb".freeze, "lib/json/ext.rb".freeze, "lib/json/ext/.keep".freeze, "lib/json/generic_object.rb".freeze, "lib/json/pure.rb".freeze, "lib/json/pure/generator.rb".freeze, "lib/json/pure/parser.rb".freeze, "lib/json/version.rb".freeze, "references/rfc7159.txt".freeze, "tests/fixtures/fail10.json".freeze, "tests/fixtures/fail11.json".freeze, "tests/fixtures/fail12.json".freeze, "tests/fixtures/fail13.json".freeze, "tests/fixtures/fail14.json".freeze, "tests/fixtures/fail18.json".freeze, "tests/fixtures/fail19.json".freeze, "tests/fixtures/fail2.json".freeze, "tests/fixtures/fail20.json".freeze, "tests/fixtures/fail21.json".freeze, "tests/fixtures/fail22.json".freeze, "tests/fixtures/fail23.json".freeze, "tests/fixtures/fail24.json".freeze, "tests/fixtures/fail25.json".freeze, "tests/fixtures/fail27.json".freeze, "tests/fixtures/fail28.json".freeze, "tests/fixtures/fail3.json".freeze, "tests/fixtures/fail4.json".freeze, "tests/fixtures/fail5.json".freeze, "tests/fixtures/fail6.json".freeze, "tests/fixtures/fail7.json".freeze, "tests/fixtures/fail8.json".freeze, "tests/fixtures/fail9.json".freeze, "tests/fixtures/obsolete_fail1.json".freeze, "tests/fixtures/pass1.json".freeze, "tests/fixtures/pass15.json".freeze, "tests/fixtures/pass16.json".freeze, "tests/fixtures/pass17.json".freeze, "tests/fixtures/pass2.json".freeze, "tests/fixtures/pass26.json".freeze, "tests/fixtures/pass3.json".freeze, "tests/json_addition_test.rb".freeze, "tests/json_common_interface_test.rb".freeze, "tests/json_encoding_test.rb".freeze, "tests/json_ext_parser_test.rb".freeze, "tests/json_fixtures_test.rb".freeze, "tests/json_generator_test.rb".freeze, "tests/json_generic_object_test.rb".freeze, "tests/json_parser_test.rb".freeze, "tests/json_string_matching_test.rb".freeze, 
"tests/test_helper.rb".freeze, "tools/diff.sh".freeze, "tools/fuzz.rb".freeze, "tools/server.rb".freeze] s.homepage = "http://flori.github.com/json".freeze s.licenses = ["Ruby".freeze] s.rdoc_options = ["--title".freeze, "JSON implemention for ruby".freeze, "--main".freeze, "README.md".freeze] s.required_ruby_version = Gem::Requirement.new(">= 1.9".freeze) s.rubygems_version = "2.6.11".freeze s.summary = "JSON Implementation for Ruby".freeze s.test_files = ["./tests/test_helper.rb".freeze] if s.respond_to? :specification_version then s.specification_version = 4 if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then s.add_development_dependency(%q.freeze, [">= 0"]) s.add_development_dependency(%q.freeze, ["~> 2.0"]) else s.add_dependency(%q.freeze, [">= 0"]) s.add_dependency(%q.freeze, ["~> 2.0"]) end else s.add_dependency(%q.freeze, [">= 0"]) s.add_dependency(%q.freeze, ["~> 2.0"]) end end ruby-json-2.1.0+dfsg.orig/README-json-jruby.md0000644000175000017500000000164113113111601020243 0ustar boutilboutilJSON-JRuby ========== JSON-JRuby is a port of Florian Frank's native [`json` library](http://json.rubyforge.org/) to JRuby. It aims to be a perfect drop-in replacement for `json_pure`. Development version =================== The latest version is available from the [Git repository](http://github.com/mernen/json-jruby/tree): git clone git://github.com/mernen/json-jruby.git Compiling ========= You'll need JRuby version 1.2 or greater to build JSON-JRuby. Its path must be set on the `jruby.dir` property of `nbproject/project.properties` (defaults to `../jruby`). Additionally, you'll need [Ant](http://ant.apache.org/), and [Ragel](http://www.cs.queensu.ca/~thurston/ragel/) 6.4 or greater. Then, from the folder where the sources are located, type: ant clean jar to clean any leftovers from previous builds and generate the `.jar` files. To generate a RubyGem, specify the `gem` action rather than `jar`. 
ruby-json-2.1.0+dfsg.orig/Rakefile0000644000175000017500000002701013113111601016327 0ustar boutilboutilbegin require 'rubygems/package_task' rescue LoadError end require 'rbconfig' include\ begin RbConfig rescue NameError Config end require 'rake/clean' CLOBBER.include 'doc', 'Gemfile.lock' CLEAN.include FileList['diagrams/*.*'], 'doc', 'coverage', 'tmp', FileList["ext/**/{Makefile,mkmf.log}"], 'build', 'dist', FileList['**/*.rbc'], FileList["{ext,lib}/**/*.{so,bundle,#{CONFIG['DLEXT']},o,obj,pdb,lib,manifest,exp,def,jar,class,dSYM}"], FileList['java/src/**/*.class'] require 'rake/testtask' class UndocumentedTestTask < Rake::TestTask def desc(*) end end MAKE = ENV['MAKE'] || %w[gmake make].find { |c| system(c, '-v') } BUNDLE = ENV['BUNDLE'] || %w[bundle].find { |c| system(c, '-v') } PKG_NAME = 'json' PKG_TITLE = 'JSON Implementation for Ruby' PKG_VERSION = File.read('VERSION').chomp PKG_FILES = FileList[`git ls-files`.split(/\n/)] EXT_ROOT_DIR = 'ext/json/ext' EXT_PARSER_DIR = "#{EXT_ROOT_DIR}/parser" EXT_PARSER_DL = "#{EXT_PARSER_DIR}/parser.#{CONFIG['DLEXT']}" RAGEL_PATH = "#{EXT_PARSER_DIR}/parser.rl" EXT_PARSER_SRC = "#{EXT_PARSER_DIR}/parser.c" EXT_GENERATOR_DIR = "#{EXT_ROOT_DIR}/generator" EXT_GENERATOR_DL = "#{EXT_GENERATOR_DIR}/generator.#{CONFIG['DLEXT']}" EXT_GENERATOR_SRC = "#{EXT_GENERATOR_DIR}/generator.c" JAVA_DIR = "java/src/json/ext" JAVA_RAGEL_PATH = "#{JAVA_DIR}/Parser.rl" JAVA_PARSER_SRC = "#{JAVA_DIR}/Parser.java" JAVA_SOURCES = FileList["#{JAVA_DIR}/*.java"] JAVA_CLASSES = [] JRUBY_PARSER_JAR = File.expand_path("lib/json/ext/parser.jar") JRUBY_GENERATOR_JAR = File.expand_path("lib/json/ext/generator.jar") RAGEL_CODEGEN = %w[rlcodegen rlgen-cd ragel].find { |c| system(c, '-v') } RAGEL_DOTGEN = %w[rlgen-dot rlgen-cd ragel].find { |c| system(c, '-v') } desc "Installing library (pure)" task :install_pure => :version do ruby 'install.rb' end task :install_ext_really do sitearchdir = CONFIG["sitearchdir"] cd 'ext' do for file in 
Dir["json/ext/*.#{CONFIG['DLEXT']}"] d = File.join(sitearchdir, file) mkdir_p File.dirname(d) install(file, d) end warn " *** Installed EXT ruby library." end end desc "Installing library (extension)" task :install_ext => [ :compile, :install_pure, :install_ext_really ] desc "Installing library (extension)" task :install => :install_ext if defined?(Gem) and defined?(Gem::PackageTask) spec_pure = Gem::Specification.new do |s| s.name = 'json_pure' s.version = PKG_VERSION s.summary = PKG_TITLE s.description = "This is a JSON implementation in pure Ruby." s.files = PKG_FILES s.require_path = 'lib' s.add_development_dependency 'rake' s.add_development_dependency 'test-unit', '~> 2.0' s.extra_rdoc_files << 'README.md' s.rdoc_options << '--title' << 'JSON implemention for ruby' << '--main' << 'README.md' s.test_files.concat Dir['./tests/test_*.rb'] s.author = "Florian Frank" s.email = "flori@ping.de" s.homepage = "http://flori.github.com/#{PKG_NAME}" s.license = 'Ruby' s.required_ruby_version = '>= 1.9' end desc 'Creates a json_pure.gemspec file' task :gemspec_pure => :version do File.open('json_pure.gemspec', 'w') do |gemspec| gemspec.write spec_pure.to_ruby end end Gem::PackageTask.new(spec_pure) do |pkg| pkg.need_tar = true pkg.package_files = PKG_FILES end spec_ext = Gem::Specification.new do |s| s.name = 'json' s.version = PKG_VERSION s.summary = PKG_TITLE s.description = "This is a JSON implementation as a Ruby extension in C." 
s.files = PKG_FILES s.extensions = FileList['ext/**/extconf.rb'] s.require_path = 'lib' s.add_development_dependency 'rake' s.add_development_dependency 'test-unit', '~> 2.0' s.extra_rdoc_files << 'README.md' s.rdoc_options << '--title' << 'JSON implemention for Ruby' << '--main' << 'README.md' s.test_files.concat Dir['./tests/test_*.rb'] s.author = "Florian Frank" s.email = "flori@ping.de" s.homepage = "http://flori.github.com/#{PKG_NAME}" s.license = 'Ruby' s.required_ruby_version = '>= 1.9' end desc 'Creates a json.gemspec file' task :gemspec_ext => :version do File.open('json.gemspec', 'w') do |gemspec| gemspec.write spec_ext.to_ruby end end Gem::PackageTask.new(spec_ext) do |pkg| pkg.need_tar = true pkg.package_files = PKG_FILES end desc 'Create all gemspec files' task :gemspec => [ :gemspec_pure, :gemspec_ext ] end desc m = "Writing version information for #{PKG_VERSION}" task :version do puts m File.open(File.join('lib', 'json', 'version.rb'), 'w') do |v| v.puts < [ :clean, :check_env, :do_test_pure ] UndocumentedTestTask.new do |t| t.name = 'do_test_pure' t.libs << 'lib' << 'tests' t.test_files = FileList['tests/*_test.rb'] t.verbose = true t.options = '-v' end desc "Testing library (pure ruby and extension)" task :test do sh "env JSON=pure #{BUNDLE} exec rake test_pure" or exit 1 sh "env JSON=ext #{BUNDLE} exec rake test_ext" or exit 1 end namespace :gems do desc 'Install all development gems' task :install do sh "#{BUNDLE}" end end if defined?(RUBY_ENGINE) and RUBY_ENGINE == 'jruby' ENV['JAVA_HOME'] ||= [ '/usr/local/java/jdk', '/usr/lib/jvm/java-6-openjdk', '/Library/Java/Home', ].find { |c| File.directory?(c) } if ENV['JAVA_HOME'] warn " *** JAVA_HOME is set to #{ENV['JAVA_HOME'].inspect}" ENV['PATH'] = ENV['PATH'].split(/:/).unshift(java_path = "#{ENV['JAVA_HOME']}/bin") * ':' warn " *** java binaries are assumed to be in #{java_path.inspect}" else warn " *** JAVA_HOME was not set or could not be guessed!" 
exit 1 end file JAVA_PARSER_SRC => JAVA_RAGEL_PATH do cd JAVA_DIR do if RAGEL_CODEGEN == 'ragel' sh "ragel Parser.rl -J -o Parser.java" else sh "ragel -x Parser.rl | #{RAGEL_CODEGEN} -J" end end end desc "Generate parser for java with ragel" task :ragel => JAVA_PARSER_SRC desc "Delete the ragel generated Java source" task :ragel_clean do rm_rf JAVA_PARSER_SRC end JRUBY_JAR = File.join(CONFIG["libdir"], "jruby.jar") if File.exist?(JRUBY_JAR) JAVA_SOURCES.each do |src| classpath = (Dir['java/lib/*.jar'] << 'java/src' << JRUBY_JAR) * ':' obj = src.sub(/\.java\Z/, '.class') file obj => src do sh 'javac', '-classpath', classpath, '-source', '1.5', '-target', '1.5', src end JAVA_CLASSES << obj end else warn "WARNING: Cannot find jruby in path => Cannot build jruby extension!" end desc "Compiling jruby extension" task :compile => JAVA_CLASSES desc "Package the jruby gem" task :jruby_gem => :create_jar do sh 'gem build json-java.gemspec' mkdir_p 'pkg' mv "json-#{PKG_VERSION}-java.gem", 'pkg' end desc "Testing library (jruby)" task :test_ext => [ :check_env, :create_jar, :do_test_ext ] UndocumentedTestTask.new do |t| t.name = 'do_test_ext' t.libs << 'lib' << 'tests' t.test_files = FileList['tests/*_test.rb'] t.verbose = true t.options = '-v' end file JRUBY_PARSER_JAR => :compile do cd 'java/src' do parser_classes = FileList[ "json/ext/ByteListTranscoder*.class", "json/ext/OptionsReader*.class", "json/ext/Parser*.class", "json/ext/RuntimeInfo*.class", "json/ext/StringDecoder*.class", "json/ext/Utils*.class" ] sh 'jar', 'cf', File.basename(JRUBY_PARSER_JAR), *parser_classes mv File.basename(JRUBY_PARSER_JAR), File.dirname(JRUBY_PARSER_JAR) end end desc "Create parser jar" task :create_parser_jar => JRUBY_PARSER_JAR file JRUBY_GENERATOR_JAR => :compile do cd 'java/src' do generator_classes = FileList[ "json/ext/ByteListTranscoder*.class", "json/ext/OptionsReader*.class", "json/ext/Generator*.class", "json/ext/RuntimeInfo*.class", "json/ext/StringEncoder*.class", 
"json/ext/Utils*.class" ] sh 'jar', 'cf', File.basename(JRUBY_GENERATOR_JAR), *generator_classes mv File.basename(JRUBY_GENERATOR_JAR), File.dirname(JRUBY_GENERATOR_JAR) end end desc "Create generator jar" task :create_generator_jar => JRUBY_GENERATOR_JAR desc "Create parser and generator jars" task :create_jar => [ :create_parser_jar, :create_generator_jar ] desc "Build all gems and archives for a new release of the jruby extension." task :build => [ :clean, :version, :jruby_gem ] task :release => :build else desc "Compiling extension" task :compile => [ EXT_PARSER_DL, EXT_GENERATOR_DL ] file EXT_PARSER_DL => EXT_PARSER_SRC do cd EXT_PARSER_DIR do ruby 'extconf.rb' sh MAKE end cp "#{EXT_PARSER_DIR}/parser.#{CONFIG['DLEXT']}", EXT_ROOT_DIR end file EXT_GENERATOR_DL => EXT_GENERATOR_SRC do cd EXT_GENERATOR_DIR do ruby 'extconf.rb' sh MAKE end cp "#{EXT_GENERATOR_DIR}/generator.#{CONFIG['DLEXT']}", EXT_ROOT_DIR end desc "Testing library (extension)" task :test_ext => [ :check_env, :compile, :do_test_ext ] UndocumentedTestTask.new do |t| t.name = 'do_test_ext' t.libs << 'ext' << 'lib' << 'tests' t.test_files = FileList['tests/*_test.rb'] t.verbose = true t.options = '-v' end desc "Generate parser with ragel" task :ragel => EXT_PARSER_SRC desc "Delete the ragel generated C source" task :ragel_clean do rm_rf EXT_PARSER_SRC end desc "Update the tags file" task :tags do system 'ctags', *Dir['**/*.{rb,c,h,java}'] end file EXT_PARSER_SRC => RAGEL_PATH do cd EXT_PARSER_DIR do if RAGEL_CODEGEN == 'ragel' sh "ragel parser.rl -G2 -o parser.c" else sh "ragel -x parser.rl | #{RAGEL_CODEGEN} -G2" end src = File.read("parser.c").gsub(/[ \t]+$/, '') src.gsub!(/^static const int (JSON_.*=.*);$/, 'enum {\1};') File.open("parser.c", "w") {|f| f.print src} end end desc "Generate diagrams of ragel parser (ps)" task :ragel_dot_ps do root = 'diagrams' specs = [] File.new(RAGEL_PATH).grep(/^\s*machine\s*(\S+);\s*$/) { specs << $1 } for s in specs if RAGEL_DOTGEN == 'ragel' sh "ragel 
#{RAGEL_PATH} -S#{s} -p -V | dot -Tps -o#{root}/#{s}.ps" else sh "ragel -x #{RAGEL_PATH} -S#{s} | #{RAGEL_DOTGEN} -p|dot -Tps -o#{root}/#{s}.ps" end end end desc "Generate diagrams of ragel parser (png)" task :ragel_dot_png do root = 'diagrams' specs = [] File.new(RAGEL_PATH).grep(/^\s*machine\s*(\S+);\s*$/) { specs << $1 } for s in specs if RAGEL_DOTGEN == 'ragel' sh "ragel #{RAGEL_PATH} -S#{s} -p -V | dot -Tpng -o#{root}/#{s}.png" else sh "ragel -x #{RAGEL_PATH} -S#{s} | #{RAGEL_DOTGEN} -p|dot -Tpng -o#{root}/#{s}.png" end end end desc "Generate diagrams of ragel parser" task :ragel_dot => [ :ragel_dot_png, :ragel_dot_ps ] desc "Build all gems and archives for a new release of json and json_pure." task :build => [ :clean, :gemspec, :package ] task :release => :build end desc "Compile in the the source directory" task :default => [ :clean, :gemspec, :test ] ruby-json-2.1.0+dfsg.orig/diagrams/0000755000175000017500000000000013113111601016451 5ustar boutilboutilruby-json-2.1.0+dfsg.orig/diagrams/.keep0000644000175000017500000000000013113111601017364 0ustar boutilboutilruby-json-2.1.0+dfsg.orig/install.rb0000755000175000017500000000067113113111601016664 0ustar boutilboutil#!/usr/bin/env ruby require 'fileutils' include FileUtils::Verbose require 'rbconfig' include\ begin RbConfig rescue NameError Config end sitelibdir = CONFIG["sitelibdir"] cd 'lib' do install('json.rb', sitelibdir) mkdir_p File.join(sitelibdir, 'json') for file in Dir['json/**/*}'] d = File.join(sitelibdir, file) mkdir_p File.dirname(d) install(file, d) end end warn " *** Installed PURE ruby library." 
ruby-json-2.1.0+dfsg.orig/tests/0000755000175000017500000000000013113111601016024 5ustar boutilboutilruby-json-2.1.0+dfsg.orig/tests/json_common_interface_test.rb0000644000175000017500000000671213113111601023757 0ustar boutilboutil#frozen_string_literal: false require 'test_helper' require 'stringio' require 'tempfile' class JSONCommonInterfaceTest < Test::Unit::TestCase include JSON def setup @hash = { 'a' => 2, 'b' => 3.141, 'c' => 'c', 'd' => [ 1, "b", 3.14 ], 'e' => { 'foo' => 'bar' }, 'g' => "\"\0\037", 'h' => 1000.0, 'i' => 0.001 } @json = '{"a":2,"b":3.141,"c":"c","d":[1,"b",3.14],"e":{"foo":"bar"},'\ '"g":"\\"\\u0000\\u001f","h":1000.0,"i":0.001}' end def test_index assert_equal @json, JSON[@hash] assert_equal @hash, JSON[@json] end def test_parser assert_match /::Parser\z/, JSON.parser.name end def test_generator assert_match /::Generator\z/, JSON.generator.name end def test_state assert_match /::Generator::State\z/, JSON.state.name end def test_create_id assert_equal 'json_class', JSON.create_id JSON.create_id = 'foo_bar' assert_equal 'foo_bar', JSON.create_id ensure JSON.create_id = 'json_class' end def test_deep_const_get assert_raise(ArgumentError) { JSON.deep_const_get('Nix::Da') } assert_equal File::SEPARATOR, JSON.deep_const_get('File::SEPARATOR') end def test_parse assert_equal [ 1, 2, 3, ], JSON.parse('[ 1, 2, 3 ]') end def test_parse_bang assert_equal [ 1, NaN, 3, ], JSON.parse!('[ 1, NaN, 3 ]') end def test_generate assert_equal '[1,2,3]', JSON.generate([ 1, 2, 3 ]) end def test_fast_generate assert_equal '[1,2,3]', JSON.generate([ 1, 2, 3 ]) end def test_pretty_generate assert_equal "[\n 1,\n 2,\n 3\n]", JSON.pretty_generate([ 1, 2, 3 ]) end def test_load assert_equal @hash, JSON.load(@json) tempfile = Tempfile.open('@json') tempfile.write @json tempfile.rewind assert_equal @hash, JSON.load(tempfile) stringio = StringIO.new(@json) stringio.rewind assert_equal @hash, JSON.load(stringio) assert_equal nil, JSON.load(nil) assert_equal nil, 
JSON.load('') ensure tempfile.close! end def test_load_with_options json = '{ "foo": NaN }' assert JSON.load(json, nil, :allow_nan => true)['foo'].nan? end def test_load_null assert_equal nil, JSON.load(nil, nil, :allow_blank => true) assert_raise(TypeError) { JSON.load(nil, nil, :allow_blank => false) } assert_raise(JSON::ParserError) { JSON.load('', nil, :allow_blank => false) } end def test_dump too_deep = '[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]' assert_equal too_deep, dump(eval(too_deep)) assert_kind_of String, Marshal.dump(eval(too_deep)) assert_raise(ArgumentError) { dump(eval(too_deep), 100) } assert_raise(ArgumentError) { Marshal.dump(eval(too_deep), 100) } assert_equal too_deep, dump(eval(too_deep), 101) assert_kind_of String, Marshal.dump(eval(too_deep), 101) output = StringIO.new dump(eval(too_deep), output) assert_equal too_deep, output.string output = StringIO.new dump(eval(too_deep), output, 101) assert_equal too_deep, output.string end def test_dump_should_modify_defaults max_nesting = JSON.dump_default_options[:max_nesting] dump([], StringIO.new, 10) assert_equal max_nesting, JSON.dump_default_options[:max_nesting] end def test_JSON assert_equal @json, JSON(@hash) assert_equal @hash, JSON(@json) end end ruby-json-2.1.0+dfsg.orig/tests/json_parser_test.rb0000644000175000017500000003424513113111601021745 0ustar boutilboutil# encoding: utf-8 # frozen_string_literal: false require 'test_helper' require 'stringio' require 'tempfile' require 'ostruct' class JSONParserTest < Test::Unit::TestCase include JSON def test_construction parser = JSON::Parser.new('test') assert_equal 'test', parser.source end def test_argument_encoding source = "{}".encode("UTF-16") JSON::Parser.new(source) assert_equal Encoding::UTF_16, source.encoding end if defined?(Encoding::UTF_16) def test_error_message_encoding 
bug10705 = '[ruby-core:67386] [Bug #10705]' json = ".\"\xE2\x88\x9A\"".force_encoding(Encoding::UTF_8) e = assert_raise(JSON::ParserError) { JSON::Ext::Parser.new(json).parse } assert_equal(Encoding::UTF_8, e.message.encoding, bug10705) assert_include(e.message, json, bug10705) end if defined?(Encoding::UTF_8) and defined?(JSON::Ext::Parser) def test_parsing parser = JSON::Parser.new('"test"') assert_equal 'test', parser.parse end def test_parser_reset parser = Parser.new('{"a":"b"}') assert_equal({ 'a' => 'b' }, parser.parse) assert_equal({ 'a' => 'b' }, parser.parse) end def test_parse_values assert_equal(nil, parse('null')) assert_equal(false, parse('false')) assert_equal(true, parse('true')) assert_equal(-23, parse('-23')) assert_equal(23, parse('23')) assert_in_delta(0.23, parse('0.23'), 1e-2) assert_in_delta(0.0, parse('0e0'), 1e-2) assert_equal("", parse('""')) assert_equal("foobar", parse('"foobar"')) end def test_parse_simple_arrays assert_equal([], parse('[]')) assert_equal([], parse(' [ ] ')) assert_equal([ nil ], parse('[null]')) assert_equal([ false ], parse('[false]')) assert_equal([ true ], parse('[true]')) assert_equal([ -23 ], parse('[-23]')) assert_equal([ 23 ], parse('[23]')) assert_equal_float([ 0.23 ], parse('[0.23]')) assert_equal_float([ 0.0 ], parse('[0e0]')) assert_equal([""], parse('[""]')) assert_equal(["foobar"], parse('["foobar"]')) assert_equal([{}], parse('[{}]')) end def test_parse_simple_objects assert_equal({}, parse('{}')) assert_equal({}, parse(' { } ')) assert_equal({ "a" => nil }, parse('{ "a" : null}')) assert_equal({ "a" => nil }, parse('{"a":null}')) assert_equal({ "a" => false }, parse('{ "a" : false } ')) assert_equal({ "a" => false }, parse('{"a":false}')) assert_raise(JSON::ParserError) { parse('{false}') } assert_equal({ "a" => true }, parse('{"a":true}')) assert_equal({ "a" => true }, parse(' { "a" : true } ')) assert_equal({ "a" => -23 }, parse(' { "a" : -23 } ')) assert_equal({ "a" => -23 }, parse(' { "a" : -23 } ')) 
assert_equal({ "a" => 23 }, parse('{"a":23 } ')) assert_equal({ "a" => 23 }, parse(' { "a" : 23 } ')) assert_equal({ "a" => 0.23 }, parse(' { "a" : 0.23 } ')) assert_equal({ "a" => 0.23 }, parse(' { "a" : 0.23 } ')) end def test_parse_numbers assert_raise(JSON::ParserError) { parse('+23.2') } assert_raise(JSON::ParserError) { parse('+23') } assert_raise(JSON::ParserError) { parse('.23') } assert_raise(JSON::ParserError) { parse('023') } assert_equal 23, parse('23') assert_equal -23, parse('-23') assert_equal_float 3.141, parse('3.141') assert_equal_float -3.141, parse('-3.141') assert_equal_float 3.141, parse('3141e-3') assert_equal_float 3.141, parse('3141.1e-3') assert_equal_float 3.141, parse('3141E-3') assert_equal_float 3.141, parse('3141.0E-3') assert_equal_float -3.141, parse('-3141.0e-3') assert_equal_float -3.141, parse('-3141e-3') assert_raise(ParserError) { parse('NaN') } assert parse('NaN', :allow_nan => true).nan? assert_raise(ParserError) { parse('Infinity') } assert_equal 1.0/0, parse('Infinity', :allow_nan => true) assert_raise(ParserError) { parse('-Infinity') } assert_equal -1.0/0, parse('-Infinity', :allow_nan => true) end def test_parse_bigdecimals assert_equal(BigDecimal, JSON.parse('{"foo": 9.01234567890123456789}', decimal_class: BigDecimal)["foo"].class) assert_equal(BigDecimal.new("0.901234567890123456789E1"),JSON.parse('{"foo": 9.01234567890123456789}', decimal_class: BigDecimal)["foo"] ) end if Array.method_defined?(:permutation) def test_parse_more_complex_arrays a = [ nil, false, true, "foßbar", [ "n€st€d", true ], { "nested" => true, "n€ßt€ð2" => {} }] a.permutation.each do |perm| json = pretty_generate(perm) assert_equal perm, parse(json) end end def test_parse_complex_objects a = [ nil, false, true, "foßbar", [ "n€st€d", true ], { "nested" => true, "n€ßt€ð2" => {} }] a.permutation.each do |perm| s = "a" orig_obj = perm.inject({}) { |h, x| h[s.dup] = x; s = s.succ; h } json = pretty_generate(orig_obj) assert_equal orig_obj, 
parse(json) end end end def test_parse_arrays assert_equal([1,2,3], parse('[1,2,3]')) assert_equal([1.2,2,3], parse('[1.2,2,3]')) assert_equal([[],[[],[]]], parse('[[],[[],[]]]')) assert_equal([], parse('[]')) assert_equal([], parse(' [ ] ')) assert_equal([1], parse('[1]')) assert_equal([1], parse(' [ 1 ] ')) ary = [[1], ["foo"], [3.14], [4711.0], [2.718], [nil], [[1, -2, 3]], [false], [true]] assert_equal(ary, parse('[[1],["foo"],[3.14],[47.11e+2],[2718.0E-3],[null],[[1,-2,3]],[false],[true]]')) assert_equal(ary, parse(%Q{ [ [1] , ["foo"] , [3.14] \t , [47.11e+2]\s , [2718.0E-3 ],\r[ null] , [[1, -2, 3 ]], [false ],[ true]\n ] })) end def test_parse_json_primitive_values assert_raise(JSON::ParserError) { parse('') } assert_raise(TypeError) { parse(nil) } assert_raise(JSON::ParserError) { parse(' /* foo */ ') } assert_equal nil, parse('null') assert_equal false, parse('false') assert_equal true, parse('true') assert_equal 23, parse('23') assert_equal 1, parse('1') assert_equal_float 3.141, parse('3.141'), 1E-3 assert_equal 2 ** 64, parse('18446744073709551616') assert_equal 'foo', parse('"foo"') assert parse('NaN', :allow_nan => true).nan? assert parse('Infinity', :allow_nan => true).infinite? assert parse('-Infinity', :allow_nan => true).infinite? 
assert_raise(JSON::ParserError) { parse('[ 1, ]') } end def test_parse_some_strings assert_equal([""], parse('[""]')) assert_equal(["\\"], parse('["\\\\"]')) assert_equal(['"'], parse('["\""]')) assert_equal(['\\"\\'], parse('["\\\\\\"\\\\"]')) assert_equal( ["\"\b\n\r\t\0\037"], parse('["\"\b\n\r\t\u0000\u001f"]') ) end def test_parse_big_integers json1 = JSON(orig = (1 << 31) - 1) assert_equal orig, parse(json1) json2 = JSON(orig = 1 << 31) assert_equal orig, parse(json2) json3 = JSON(orig = (1 << 62) - 1) assert_equal orig, parse(json3) json4 = JSON(orig = 1 << 62) assert_equal orig, parse(json4) json5 = JSON(orig = 1 << 64) assert_equal orig, parse(json5) end def test_some_wrong_inputs assert_raise(ParserError) { parse('[] bla') } assert_raise(ParserError) { parse('[] 1') } assert_raise(ParserError) { parse('[] []') } assert_raise(ParserError) { parse('[] {}') } assert_raise(ParserError) { parse('{} []') } assert_raise(ParserError) { parse('{} {}') } assert_raise(ParserError) { parse('[NULL]') } assert_raise(ParserError) { parse('[FALSE]') } assert_raise(ParserError) { parse('[TRUE]') } assert_raise(ParserError) { parse('[07] ') } assert_raise(ParserError) { parse('[0a]') } assert_raise(ParserError) { parse('[1.]') } assert_raise(ParserError) { parse(' ') } end def test_symbolize_names assert_equal({ "foo" => "bar", "baz" => "quux" }, parse('{"foo":"bar", "baz":"quux"}')) assert_equal({ :foo => "bar", :baz => "quux" }, parse('{"foo":"bar", "baz":"quux"}', :symbolize_names => true)) assert_raise(ArgumentError) do parse('{}', :symbolize_names => true, :create_additions => true) end end def test_parse_comments json = < "value1", "key2" => "value2", "key3" => "value3" }, parse(json)) json = < "value1" }, parse(json)) end def test_nesting too_deep = '[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]' 
too_deep_ary = eval too_deep assert_raise(JSON::NestingError) { parse too_deep } assert_raise(JSON::NestingError) { parse too_deep, :max_nesting => 100 } ok = parse too_deep, :max_nesting => 101 assert_equal too_deep_ary, ok ok = parse too_deep, :max_nesting => nil assert_equal too_deep_ary, ok ok = parse too_deep, :max_nesting => false assert_equal too_deep_ary, ok ok = parse too_deep, :max_nesting => 0 assert_equal too_deep_ary, ok end def test_backslash data = [ '\\.(?i:gif|jpe?g|png)$' ] json = '["\\\\.(?i:gif|jpe?g|png)$"]' assert_equal data, parse(json) # data = [ '\\"' ] json = '["\\\\\""]' assert_equal data, parse(json) # json = '["/"]' data = [ '/' ] assert_equal data, parse(json) # json = '["\""]' data = ['"'] assert_equal data, parse(json) # json = '["\\\'"]' data = ["'"] assert_equal data, parse(json) end class SubArray < Array def <<(v) @shifted = true super end def shifted? @shifted end end class SubArray2 < Array def to_json(*a) { JSON.create_id => self.class.name, 'ary' => to_a, }.to_json(*a) end def self.json_create(o) o.delete JSON.create_id o['ary'] end end class SubArrayWrapper def initialize @data = [] end attr_reader :data def [](index) @data[index] end def <<(value) @data << value @shifted = true end def shifted? @shifted end end def test_parse_array_custom_array_derived_class res = parse('[1,2]', :array_class => SubArray) assert_equal([1,2], res) assert_equal(SubArray, res.class) assert res.shifted? end def test_parse_array_custom_non_array_derived_class res = parse('[1,2]', :array_class => SubArrayWrapper) assert_equal([1,2], res.data) assert_equal(SubArrayWrapper, res.class) assert res.shifted? end def test_parse_object assert_equal({}, parse('{}')) assert_equal({}, parse(' { } ')) assert_equal({'foo'=>'bar'}, parse('{"foo":"bar"}')) assert_equal({'foo'=>'bar'}, parse(' { "foo" : "bar" } ')) end class SubHash < Hash def []=(k, v) @item_set = true super end def item_set? 
@item_set end end class SubHash2 < Hash def to_json(*a) { JSON.create_id => self.class.name, }.merge(self).to_json(*a) end def self.json_create(o) o.delete JSON.create_id self[o] end end class SubOpenStruct < OpenStruct def [](k) __send__(k) end def []=(k, v) @item_set = true __send__("#{k}=", v) end def item_set? @item_set end end def test_parse_object_custom_hash_derived_class res = parse('{"foo":"bar"}', :object_class => SubHash) assert_equal({"foo" => "bar"}, res) assert_equal(SubHash, res.class) assert res.item_set? end def test_parse_object_custom_non_hash_derived_class res = parse('{"foo":"bar"}', :object_class => SubOpenStruct) assert_equal "bar", res.foo assert_equal(SubOpenStruct, res.class) assert res.item_set? end def test_parse_generic_object res = parse( '{"foo":"bar", "baz":{}}', :object_class => JSON::GenericObject ) assert_equal(JSON::GenericObject, res.class) assert_equal "bar", res.foo assert_equal "bar", res["foo"] assert_equal "bar", res[:foo] assert_equal "bar", res.to_hash[:foo] assert_equal(JSON::GenericObject, res.baz.class) end def test_generate_core_subclasses_with_new_to_json obj = SubHash2["foo" => SubHash2["bar" => true]] obj_json = JSON(obj) obj_again = parse(obj_json, :create_additions => true) assert_kind_of SubHash2, obj_again assert_kind_of SubHash2, obj_again['foo'] assert obj_again['foo']['bar'] assert_equal obj, obj_again assert_equal ["foo"], JSON(JSON(SubArray2["foo"]), :create_additions => true) end def test_generate_core_subclasses_with_default_to_json assert_equal '{"foo":"bar"}', JSON(SubHash["foo" => "bar"]) assert_equal '["foo"]', JSON(SubArray["foo"]) end def test_generate_of_core_subclasses obj = SubHash["foo" => SubHash["bar" => true]] obj_json = JSON(obj) obj_again = JSON(obj_json) assert_kind_of Hash, obj_again assert_kind_of Hash, obj_again['foo'] assert obj_again['foo']['bar'] assert_equal obj, obj_again end def test_parsing_frozen_ascii8bit_string assert_equal( { 'foo' => 'bar' }, JSON('{ "foo": "bar" 
}'.force_encoding(Encoding::ASCII_8BIT).freeze) ) end private def assert_equal_float(expected, actual, delta = 1e-2) Array === expected and expected = expected.first Array === actual and actual = actual.first assert_in_delta(expected, actual, delta) end end ruby-json-2.1.0+dfsg.orig/tests/json_string_matching_test.rb0000644000175000017500000000141513113111601023622 0ustar boutilboutil#frozen_string_literal: false require 'test_helper' require 'time' class JSONStringMatchingTest < Test::Unit::TestCase include JSON class TestTime < ::Time def self.json_create(string) Time.parse(string) end def to_json(*) %{"#{strftime('%FT%T%z')}"} end def ==(other) to_i == other.to_i end end def test_match_date t = TestTime.new t_json = [ t ].to_json time_regexp = /\A\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}[+-]\d{4}\z/ assert_equal [ t ], parse( t_json, :create_additions => true, :match_string => { time_regexp => TestTime } ) assert_equal [ t.strftime('%FT%T%z') ], parse( t_json, :match_string => { time_regexp => TestTime } ) end end ruby-json-2.1.0+dfsg.orig/tests/json_ext_parser_test.rb0000644000175000017500000000066213113111601022621 0ustar boutilboutil#frozen_string_literal: false require 'test_helper' class JSONExtParserTest < Test::Unit::TestCase if defined?(JSON::Ext::Parser) def test_allocate parser = JSON::Ext::Parser.new("{}") assert_raise(TypeError, '[ruby-core:35079]') do parser.__send__(:initialize, "{}") end parser = JSON::Ext::Parser.allocate assert_raise(TypeError, '[ruby-core:35079]') { parser.source } end end end ruby-json-2.1.0+dfsg.orig/tests/test_helper.rb0000644000175000017500000000050713113111601020671 0ustar boutilboutilcase ENV['JSON'] when 'pure' $:.unshift 'lib' require 'json/pure' when 'ext' $:.unshift 'ext', 'lib' require 'json/ext' else $:.unshift 'ext', 'lib' require 'json' end require 'test/unit' begin require 'byebug' rescue LoadError end if ENV['START_SIMPLECOV'].to_i == 1 require 'simplecov' SimpleCov.start end 
ruby-json-2.1.0+dfsg.orig/tests/json_fixtures_test.rb0000644000175000017500000000170513113111601022315 0ustar boutilboutil#frozen_string_literal: false require 'test_helper' class JSONFixturesTest < Test::Unit::TestCase def setup fixtures = File.join(File.dirname(__FILE__), 'fixtures/{fail,pass}.json') passed, failed = Dir[fixtures].partition { |f| f['pass'] } @passed = passed.inject([]) { |a, f| a << [ f, File.read(f) ] }.sort @failed = failed.inject([]) { |a, f| a << [ f, File.read(f) ] }.sort end def test_passing for name, source in @passed begin assert JSON.parse(source), "Did not pass for fixture '#{name}': #{source.inspect}" rescue => e warn "\nCaught #{e.class}(#{e}) for fixture '#{name}': #{source.inspect}\n#{e.backtrace * "\n"}" raise e end end end def test_failing for name, source in @failed assert_raise(JSON::ParserError, JSON::NestingError, "Did not fail for fixture '#{name}': #{source.inspect}") do JSON.parse(source) end end end end ruby-json-2.1.0+dfsg.orig/tests/json_generic_object_test.rb0000644000175000017500000000463413113111601023412 0ustar boutilboutil#frozen_string_literal: false require 'test_helper' class JSONGenericObjectTest < Test::Unit::TestCase include JSON def setup @go = GenericObject[ :a => 1, :b => 2 ] end def test_attributes assert_equal 1, @go.a assert_equal 1, @go[:a] assert_equal 2, @go.b assert_equal 2, @go[:b] assert_nil @go.c assert_nil @go[:c] end def test_generate_json switch_json_creatable do assert_equal @go, JSON(JSON(@go), :create_additions => true) end end def test_parse_json assert_kind_of Hash, JSON( '{ "json_class": "JSON::GenericObject", "a": 1, "b": 2 }', :create_additions => true ) switch_json_creatable do assert_equal @go, l = JSON( '{ "json_class": "JSON::GenericObject", "a": 1, "b": 2 }', :create_additions => true ) assert_equal 1, l.a assert_equal @go, l = JSON('{ "a": 1, "b": 2 }', :object_class => GenericObject) assert_equal 1, l.a assert_equal GenericObject[:a => GenericObject[:b => 2]], l = JSON('{ "a": { 
"b": 2 } }', :object_class => GenericObject) assert_equal 2, l.a.b end end def test_from_hash result = GenericObject.from_hash( :foo => { :bar => { :baz => true }, :quux => [ { :foobar => true } ] }) assert_kind_of GenericObject, result.foo assert_kind_of GenericObject, result.foo.bar assert_equal true, result.foo.bar.baz assert_kind_of GenericObject, result.foo.quux.first assert_equal true, result.foo.quux.first.foobar assert_equal true, GenericObject.from_hash(true) end def test_json_generic_object_load empty = JSON::GenericObject.load(nil) assert_kind_of JSON::GenericObject, empty simple_json = '{"json_class":"JSON::GenericObject","hello":"world"}' simple = JSON::GenericObject.load(simple_json) assert_kind_of JSON::GenericObject, simple assert_equal "world", simple.hello converting = JSON::GenericObject.load('{ "hello": "world" }') assert_kind_of JSON::GenericObject, converting assert_equal "world", converting.hello json = JSON::GenericObject.dump(JSON::GenericObject[:hello => 'world']) assert_equal JSON(json), JSON('{"json_class":"JSON::GenericObject","hello":"world"}') end private def switch_json_creatable JSON::GenericObject.json_creatable = true yield ensure JSON::GenericObject.json_creatable = false end end ruby-json-2.1.0+dfsg.orig/tests/json_addition_test.rb0000644000175000017500000001424513113111601022242 0ustar boutilboutil#frozen_string_literal: false require 'test_helper' require 'json/add/core' require 'json/add/complex' require 'json/add/rational' require 'json/add/bigdecimal' require 'json/add/ostruct' require 'date' class JSONAdditionTest < Test::Unit::TestCase include JSON class A def initialize(a) @a = a end attr_reader :a def ==(other) a == other.a end def self.json_create(object) new(*object['args']) end def to_json(*args) { 'json_class' => self.class.name, 'args' => [ @a ], }.to_json(*args) end end class A2 < A def to_json(*args) { 'json_class' => self.class.name, 'args' => [ @a ], }.to_json(*args) end end class B def self.json_creatable? 
false end def to_json(*args) { 'json_class' => self.class.name, }.to_json(*args) end end class C def self.json_creatable? false end def to_json(*args) { 'json_class' => 'JSONAdditionTest::Nix', }.to_json(*args) end end def test_extended_json a = A.new(666) assert A.json_creatable? json = generate(a) a_again = parse(json, :create_additions => true) assert_kind_of a.class, a_again assert_equal a, a_again end def test_extended_json_default a = A.new(666) assert A.json_creatable? json = generate(a) a_hash = parse(json) assert_kind_of Hash, a_hash end def test_extended_json_disabled a = A.new(666) assert A.json_creatable? json = generate(a) a_again = parse(json, :create_additions => true) assert_kind_of a.class, a_again assert_equal a, a_again a_hash = parse(json, :create_additions => false) assert_kind_of Hash, a_hash assert_equal( {"args"=>[666], "json_class"=>"JSONAdditionTest::A"}.sort_by { |k,| k }, a_hash.sort_by { |k,| k } ) end def test_extended_json_fail1 b = B.new assert !B.json_creatable? json = generate(b) assert_equal({ "json_class"=>"JSONAdditionTest::B" }, parse(json)) end def test_extended_json_fail2 c = C.new assert !C.json_creatable? json = generate(c) assert_raise(ArgumentError, NameError) { parse(json, :create_additions => true) } end def test_raw_strings raw = '' raw.respond_to?(:encode!) 
and raw.encode!(Encoding::ASCII_8BIT) raw_array = [] for i in 0..255 raw << i raw_array << i end json = raw.to_json_raw json_raw_object = raw.to_json_raw_object hash = { 'json_class' => 'String', 'raw'=> raw_array } assert_equal hash, json_raw_object assert_match(/\A\{.*\}\z/, json) assert_match(/"json_class":"String"/, json) assert_match(/"raw":\[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255\]/, json) raw_again = parse(json, :create_additions => true) assert_equal raw, raw_again end MyJsonStruct = Struct.new 'MyJsonStruct', :foo, :bar def test_core t = Time.now assert_equal t, JSON(JSON(t), :create_additions => true) d = Date.today assert_equal d, JSON(JSON(d), :create_additions => true) d = DateTime.civil(2007, 6, 14, 14, 57, 10, Rational(1, 12), 2299161) assert_equal d, JSON(JSON(d), :create_additions => true) assert_equal 1..10, JSON(JSON(1..10), :create_additions => true) assert_equal 1...10, JSON(JSON(1...10), :create_additions => true) assert_equal "a".."c", JSON(JSON("a".."c"), :create_additions => true) assert_equal "a"..."c", JSON(JSON("a"..."c"), :create_additions => true) s = MyJsonStruct.new 4711, 
'foot' assert_equal s, JSON(JSON(s), :create_additions => true) struct = Struct.new :foo, :bar s = struct.new 4711, 'foot' assert_raise(JSONError) { JSON(s) } begin raise TypeError, "test me" rescue TypeError => e e_json = JSON.generate e e_again = JSON e_json, :create_additions => true assert_kind_of TypeError, e_again assert_equal e.message, e_again.message assert_equal e.backtrace, e_again.backtrace end assert_equal(/foo/, JSON(JSON(/foo/), :create_additions => true)) assert_equal(/foo/i, JSON(JSON(/foo/i), :create_additions => true)) end def test_utc_datetime now = Time.now d = DateTime.parse(now.to_s, :create_additions => true) # usual case assert_equal d, parse(d.to_json, :create_additions => true) d = DateTime.parse(now.utc.to_s) # of = 0 assert_equal d, parse(d.to_json, :create_additions => true) d = DateTime.civil(2008, 6, 17, 11, 48, 32, Rational(1,24)) assert_equal d, parse(d.to_json, :create_additions => true) d = DateTime.civil(2008, 6, 17, 11, 48, 32, Rational(12,24)) assert_equal d, parse(d.to_json, :create_additions => true) end def test_rational_complex assert_equal Rational(2, 9), parse(JSON(Rational(2, 9)), :create_additions => true) assert_equal Complex(2, 9), parse(JSON(Complex(2, 9)), :create_additions => true) end def test_bigdecimal assert_equal BigDecimal('3.141', 23), JSON(JSON(BigDecimal('3.141', 23)), :create_additions => true) assert_equal BigDecimal('3.141', 666), JSON(JSON(BigDecimal('3.141', 666)), :create_additions => true) end def test_ostruct o = OpenStruct.new # XXX this won't work; o.foo = { :bar => true } o.foo = { 'bar' => true } assert_equal o, parse(JSON(o), :create_additions => true) end end ruby-json-2.1.0+dfsg.orig/tests/json_encoding_test.rb0000644000175000017500000001004113113111601022223 0ustar boutilboutil# encoding: utf-8 #frozen_string_literal: false require 'test_helper' class JSONEncodingTest < Test::Unit::TestCase include JSON def setup @utf_8 = '"© ≠ €!"' @ascii_8bit = @utf_8.dup.force_encoding('ascii-8bit') 
@parsed = "© ≠ €!" @generated = '"\u00a9 \u2260 \u20ac!"' if String.method_defined?(:encode) @utf_16_data = @parsed.encode('utf-16be', 'utf-8') @utf_16be = @utf_8.encode('utf-16be', 'utf-8') @utf_16le = @utf_8.encode('utf-16le', 'utf-8') @utf_32be = @utf_8.encode('utf-32be', 'utf-8') @utf_32le = @utf_8.encode('utf-32le', 'utf-8') else require 'iconv' @utf_16_data, = Iconv.iconv('utf-16be', 'utf-8', @parsed) @utf_16be, = Iconv.iconv('utf-16be', 'utf-8', @utf_8) @utf_16le, = Iconv.iconv('utf-16le', 'utf-8', @utf_8) @utf_32be, = Iconv.iconv('utf-32be', 'utf-8', @utf_8) @utf_32le, = Iconv.iconv('utf-32le', 'utf-8', @utf_8) end end def test_parse assert_equal @parsed, JSON.parse(@ascii_8bit) assert_equal @parsed, JSON.parse(@utf_8) assert_equal @parsed, JSON.parse(@utf_16be) assert_equal @parsed, JSON.parse(@utf_16le) assert_equal @parsed, JSON.parse(@utf_32be) assert_equal @parsed, JSON.parse(@utf_32le) end def test_generate assert_equal @generated, JSON.generate(@parsed, :ascii_only => true) assert_equal @generated, JSON.generate(@utf_16_data, :ascii_only => true) end def test_unicode assert_equal '""', ''.to_json assert_equal '"\\b"', "\b".to_json assert_equal '"\u0001"', 0x1.chr.to_json assert_equal '"\u001f"', 0x1f.chr.to_json assert_equal '" "', ' '.to_json assert_equal "\"#{0x7f.chr}\"", 0x7f.chr.to_json utf8 = [ "© ≠ €! \01" ] json = '["© ≠ €! \u0001"]' assert_equal json, utf8.to_json(:ascii_only => false) assert_equal utf8, parse(json) json = '["\u00a9 \u2260 \u20ac! 
\u0001"]' assert_equal json, utf8.to_json(:ascii_only => true) assert_equal utf8, parse(json) utf8 = ["\343\201\202\343\201\204\343\201\206\343\201\210\343\201\212"] json = "[\"\343\201\202\343\201\204\343\201\206\343\201\210\343\201\212\"]" assert_equal utf8, parse(json) assert_equal json, utf8.to_json(:ascii_only => false) utf8 = ["\343\201\202\343\201\204\343\201\206\343\201\210\343\201\212"] assert_equal utf8, parse(json) json = "[\"\\u3042\\u3044\\u3046\\u3048\\u304a\"]" assert_equal json, utf8.to_json(:ascii_only => true) assert_equal utf8, parse(json) utf8 = ['საქართველო'] json = '["საქართველო"]' assert_equal json, utf8.to_json(:ascii_only => false) json = "[\"\\u10e1\\u10d0\\u10e5\\u10d0\\u10e0\\u10d7\\u10d5\\u10d4\\u10da\\u10dd\"]" assert_equal json, utf8.to_json(:ascii_only => true) assert_equal utf8, parse(json) assert_equal '["Ã"]', generate(["Ã"], :ascii_only => false) assert_equal '["\\u00c3"]', generate(["Ã"], :ascii_only => true) assert_equal ["€"], parse('["\u20ac"]') utf8 = ["\xf0\xa0\x80\x81"] json = "[\"\xf0\xa0\x80\x81\"]" assert_equal json, generate(utf8, :ascii_only => false) assert_equal utf8, parse(json) json = '["\ud840\udc01"]' assert_equal json, generate(utf8, :ascii_only => true) assert_equal utf8, parse(json) assert_raise(JSON::ParserError) { parse('"\u"') } assert_raise(JSON::ParserError) { parse('"\ud800"') } end def test_chars (0..0x7f).each do |i| json = '["\u%04x"]' % i if RUBY_VERSION >= "1.9." 
i = i.chr end assert_equal i, parse(json).first[0] if i == ?\b generated = generate(["" << i]) assert '["\b"]' == generated || '["\10"]' == generated elsif [?\n, ?\r, ?\t, ?\f].include?(i) assert_equal '[' << ('' << i).dump << ']', generate(["" << i]) elsif i.chr < 0x20.chr assert_equal json, generate(["" << i]) end end assert_raise(JSON::GeneratorError) do generate(["\x80"], :ascii_only => true) end assert_equal "\302\200", parse('["\u0080"]').first end end ruby-json-2.1.0+dfsg.orig/tests/fixtures/0000755000175000017500000000000013113111601017675 5ustar boutilboutilruby-json-2.1.0+dfsg.orig/tests/fixtures/fail6.json0000644000175000017500000000003213113111601021564 0ustar boutilboutil[ , "<-- missing value"]ruby-json-2.1.0+dfsg.orig/tests/fixtures/fail8.json0000644000175000017500000000002013113111601021563 0ustar boutilboutil["Extra close"]]ruby-json-2.1.0+dfsg.orig/tests/fixtures/pass1.json0000644000175000017500000000255313113111601021624 0ustar boutilboutil[ "JSON Test Pattern pass1", {"object with 1 member":["array with 1 element"]}, {}, [], -42, true, false, null, { "integer": 1234567890, "real": -9876.543210, "e": 0.123456789e-12, "E": 1.234567890E+34, "": 23456789012E666, "zero": 0, "one": 1, "space": " ", "quote": "\"", "backslash": "\\", "controls": "\b\f\n\r\t", "slash": "/ & \/", "alpha": "abcdefghijklmnopqrstuvwyz", "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", "digit": "0123456789", "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", "true": true, "false": false, "null": null, "array":[ ], "object":{ }, "address": "50 St. James Street", "url": "http://www.JSON.org/", "comment": "// /* */": " ", " s p a c e d " :[1,2 , 3 , 4 , 5 , 6 ,7 ], "compact": [1,2,3,4,5,6,7], "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", "quotes": "" \u0022 %22 0x22 034 "", "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" 
: "A key can be any string" }, 0.5 ,98.6 , 99.44 , 1066 ,"rosebud"]ruby-json-2.1.0+dfsg.orig/tests/fixtures/fail9.json0000644000175000017500000000002613113111601021572 0ustar boutilboutil{"Extra comma": true,}ruby-json-2.1.0+dfsg.orig/tests/fixtures/fail18.json0000644000175000017500000000032513113111601021654 0ustar boutilboutil[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]] ruby-json-2.1.0+dfsg.orig/tests/fixtures/fail19.json0000644000175000017500000000002613113111601021653 0ustar boutilboutil{"Missing colon" null}ruby-json-2.1.0+dfsg.orig/tests/fixtures/pass15.json0000644000175000017500000000004213113111601021700 0ustar boutilboutil["Illegal backslash escape: \x15"]ruby-json-2.1.0+dfsg.orig/tests/fixtures/fail28.json0000644000175000017500000000001713113111601021653 0ustar boutilboutil["line\ break"]ruby-json-2.1.0+dfsg.orig/tests/fixtures/fail21.json0000644000175000017500000000004013113111601021640 0ustar boutilboutil{"Comma instead of colon", null}ruby-json-2.1.0+dfsg.orig/tests/fixtures/pass2.json0000644000175000017500000000006413113111601021620 0ustar boutilboutil[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]]ruby-json-2.1.0+dfsg.orig/tests/fixtures/pass3.json0000644000175000017500000000022413113111601021617 0ustar boutilboutil{ "JSON Test Pattern pass3": { "The outermost value": "must be an object or array.", "In this test": "It is an object." 
} } ruby-json-2.1.0+dfsg.orig/tests/fixtures/fail27.json0000644000175000017500000000001613113111601021651 0ustar boutilboutil["line break"]ruby-json-2.1.0+dfsg.orig/tests/fixtures/fail5.json0000644000175000017500000000003013113111601021561 0ustar boutilboutil["double extra comma",,]ruby-json-2.1.0+dfsg.orig/tests/fixtures/fail3.json0000644000175000017500000000004513113111601021565 0ustar boutilboutil{unquoted_key: "keys must be quoted"}ruby-json-2.1.0+dfsg.orig/tests/fixtures/pass17.json0000644000175000017500000000004213113111601021702 0ustar boutilboutil["Illegal backslash escape: \017"]ruby-json-2.1.0+dfsg.orig/tests/fixtures/fail4.json0000644000175000017500000000002013113111601021557 0ustar boutilboutil["extra comma",]ruby-json-2.1.0+dfsg.orig/tests/fixtures/fail10.json0000644000175000017500000000007213113111601021643 0ustar boutilboutil{"Extra value after close": true} "misplaced quoted value"ruby-json-2.1.0+dfsg.orig/tests/fixtures/fail25.json0000644000175000017500000000003513113111601021650 0ustar boutilboutil["tab character in string "] ruby-json-2.1.0+dfsg.orig/tests/fixtures/fail13.json0000644000175000017500000000005313113111601021645 0ustar boutilboutil{"Numbers cannot have leading zeroes": 013}ruby-json-2.1.0+dfsg.orig/tests/fixtures/fail22.json0000644000175000017500000000004113113111601021642 0ustar boutilboutil["Colon instead of comma": false]ruby-json-2.1.0+dfsg.orig/tests/fixtures/fail20.json0000644000175000017500000000002713113111601021644 0ustar boutilboutil{"Double colon":: null}ruby-json-2.1.0+dfsg.orig/tests/fixtures/fail23.json0000644000175000017500000000002413113111601021644 0ustar boutilboutil["Bad value", truth]ruby-json-2.1.0+dfsg.orig/tests/fixtures/pass16.json0000644000175000017500000000004013113111601021677 0ustar boutilboutil["Illegal backslash escape: \'"]ruby-json-2.1.0+dfsg.orig/tests/fixtures/fail24.json0000644000175000017500000000002013113111601021641 0ustar boutilboutil['single 
quote']ruby-json-2.1.0+dfsg.orig/tests/fixtures/fail14.json0000644000175000017500000000003713113111601021650 0ustar boutilboutil{"Numbers cannot be hex": 0x14}ruby-json-2.1.0+dfsg.orig/tests/fixtures/fail2.json0000644000175000017500000000002113113111601021556 0ustar boutilboutil["Unclosed array"ruby-json-2.1.0+dfsg.orig/tests/fixtures/fail11.json0000644000175000017500000000003513113111601021643 0ustar boutilboutil{"Illegal expression": 1 + 2}ruby-json-2.1.0+dfsg.orig/tests/fixtures/obsolete_fail1.json0000644000175000017500000000007513113111601023462 0ustar boutilboutil"A JSON payload should be an object or array, not a string." ruby-json-2.1.0+dfsg.orig/tests/fixtures/fail7.json0000644000175000017500000000003213113111601021565 0ustar boutilboutil["Comma after the close"],ruby-json-2.1.0+dfsg.orig/tests/fixtures/fail12.json0000644000175000017500000000003713113111601021646 0ustar boutilboutil{"Illegal invocation": alert()}ruby-json-2.1.0+dfsg.orig/tests/fixtures/pass26.json0000644000175000017500000000004613113111601021706 0ustar boutilboutil["tab\ character\ in\ string\ "]ruby-json-2.1.0+dfsg.orig/tests/json_generator_test.rb0000644000175000017500000002600513113111601022432 0ustar boutilboutil#!/usr/bin/env ruby # encoding: utf-8 # frozen_string_literal: false require 'test_helper' class JSONGeneratorTest < Test::Unit::TestCase include JSON def setup @hash = { 'a' => 2, 'b' => 3.141, 'c' => 'c', 'd' => [ 1, "b", 3.14 ], 'e' => { 'foo' => 'bar' }, 'g' => "\"\0\037", 'h' => 1000.0, 'i' => 0.001 } @json2 = '{"a":2,"b":3.141,"c":"c","d":[1,"b",3.14],"e":{"foo":"bar"},' + '"g":"\\"\\u0000\\u001f","h":1000.0,"i":0.001}' @json3 = <<'EOT'.chomp { "a": 2, "b": 3.141, "c": "c", "d": [ 1, "b", 3.14 ], "e": { "foo": "bar" }, "g": "\"\u0000\u001f", "h": 1000.0, "i": 0.001 } EOT end def test_generate json = generate(@hash) assert_equal(parse(@json2), parse(json)) json = JSON[@hash] assert_equal(parse(@json2), parse(json)) parsed_json = parse(json) assert_equal(@hash, parsed_json) 
json = generate({1=>2}) assert_equal('{"1":2}', json) parsed_json = parse(json) assert_equal({"1"=>2}, parsed_json) assert_equal '666', generate(666) end def test_generate_pretty json = pretty_generate(@hash) # hashes aren't (insertion) ordered on every ruby implementation # assert_equal(@json3, json) assert_equal(parse(@json3), parse(json)) parsed_json = parse(json) assert_equal(@hash, parsed_json) json = pretty_generate({1=>2}) assert_equal(<<'EOT'.chomp, json) { "1": 2 } EOT parsed_json = parse(json) assert_equal({"1"=>2}, parsed_json) assert_equal '666', pretty_generate(666) end def test_generate_custom state = State.new(:space_before => " ", :space => " ", :indent => "", :object_nl => "\n", :array_nl => "") json = generate({1=>{2=>3,4=>[5,6]}}, state) assert_equal(<<'EOT'.chomp, json) { "1" : { "2" : 3, "4" : [5,6] } } EOT end def test_fast_generate json = fast_generate(@hash) assert_equal(parse(@json2), parse(json)) parsed_json = parse(json) assert_equal(@hash, parsed_json) json = fast_generate({1=>2}) assert_equal('{"1":2}', json) parsed_json = parse(json) assert_equal({"1"=>2}, parsed_json) assert_equal '666', fast_generate(666) end def test_own_state state = State.new json = generate(@hash, state) assert_equal(parse(@json2), parse(json)) parsed_json = parse(json) assert_equal(@hash, parsed_json) json = generate({1=>2}, state) assert_equal('{"1":2}', json) parsed_json = parse(json) assert_equal({"1"=>2}, parsed_json) assert_equal '666', generate(666, state) end def test_states json = generate({1=>2}, nil) assert_equal('{"1":2}', json) s = JSON.state.new assert s.check_circular? assert s[:check_circular?] h = { 1=>2 } h[3] = h assert_raise(JSON::NestingError) { generate(h) } assert_raise(JSON::NestingError) { generate(h, s) } s = JSON.state.new a = [ 1, 2 ] a << a assert_raise(JSON::NestingError) { generate(a, s) } assert s.check_circular? assert s[:check_circular?] 
end def test_pretty_state state = PRETTY_STATE_PROTOTYPE.dup assert_equal({ :allow_nan => false, :array_nl => "\n", :ascii_only => false, :buffer_initial_length => 1024, :depth => 0, :indent => " ", :max_nesting => 100, :object_nl => "\n", :space => " ", :space_before => "", }.sort_by { |n,| n.to_s }, state.to_h.sort_by { |n,| n.to_s }) end def test_safe_state state = SAFE_STATE_PROTOTYPE.dup assert_equal({ :allow_nan => false, :array_nl => "", :ascii_only => false, :buffer_initial_length => 1024, :depth => 0, :indent => "", :max_nesting => 100, :object_nl => "", :space => "", :space_before => "", }.sort_by { |n,| n.to_s }, state.to_h.sort_by { |n,| n.to_s }) end def test_fast_state state = FAST_STATE_PROTOTYPE.dup assert_equal({ :allow_nan => false, :array_nl => "", :ascii_only => false, :buffer_initial_length => 1024, :depth => 0, :indent => "", :max_nesting => 0, :object_nl => "", :space => "", :space_before => "", }.sort_by { |n,| n.to_s }, state.to_h.sort_by { |n,| n.to_s }) end def test_allow_nan assert_raise(GeneratorError) { generate([JSON::NaN]) } assert_equal '[NaN]', generate([JSON::NaN], :allow_nan => true) assert_raise(GeneratorError) { fast_generate([JSON::NaN]) } assert_raise(GeneratorError) { pretty_generate([JSON::NaN]) } assert_equal "[\n NaN\n]", pretty_generate([JSON::NaN], :allow_nan => true) assert_raise(GeneratorError) { generate([JSON::Infinity]) } assert_equal '[Infinity]', generate([JSON::Infinity], :allow_nan => true) assert_raise(GeneratorError) { fast_generate([JSON::Infinity]) } assert_raise(GeneratorError) { pretty_generate([JSON::Infinity]) } assert_equal "[\n Infinity\n]", pretty_generate([JSON::Infinity], :allow_nan => true) assert_raise(GeneratorError) { generate([JSON::MinusInfinity]) } assert_equal '[-Infinity]', generate([JSON::MinusInfinity], :allow_nan => true) assert_raise(GeneratorError) { fast_generate([JSON::MinusInfinity]) } assert_raise(GeneratorError) { pretty_generate([JSON::MinusInfinity]) } assert_equal "[\n 
-Infinity\n]", pretty_generate([JSON::MinusInfinity], :allow_nan => true) end def test_depth ary = []; ary << ary assert_equal 0, JSON::SAFE_STATE_PROTOTYPE.depth assert_raise(JSON::NestingError) { generate(ary) } assert_equal 0, JSON::SAFE_STATE_PROTOTYPE.depth assert_equal 0, JSON::PRETTY_STATE_PROTOTYPE.depth assert_raise(JSON::NestingError) { JSON.pretty_generate(ary) } assert_equal 0, JSON::PRETTY_STATE_PROTOTYPE.depth s = JSON.state.new assert_equal 0, s.depth assert_raise(JSON::NestingError) { ary.to_json(s) } assert_equal 100, s.depth end def test_buffer_initial_length s = JSON.state.new assert_equal 1024, s.buffer_initial_length s.buffer_initial_length = 0 assert_equal 1024, s.buffer_initial_length s.buffer_initial_length = -1 assert_equal 1024, s.buffer_initial_length s.buffer_initial_length = 128 assert_equal 128, s.buffer_initial_length end def test_gc if respond_to?(:assert_in_out_err) assert_in_out_err(%w[-rjson --disable-gems], <<-EOS, [], []) bignum_too_long_to_embed_as_string = 1234567890123456789012345 expect = bignum_too_long_to_embed_as_string.to_s GC.stress = true 10.times do |i| tmp = bignum_too_long_to_embed_as_string.to_json raise "'\#{expect}' is expected, but '\#{tmp}'" unless tmp == expect end EOS end end if GC.respond_to?(:stress=) def test_configure_using_configure_and_merge numbered_state = { :indent => "1", :space => '2', :space_before => '3', :object_nl => '4', :array_nl => '5' } state1 = JSON.state.new state1.merge(numbered_state) assert_equal '1', state1.indent assert_equal '2', state1.space assert_equal '3', state1.space_before assert_equal '4', state1.object_nl assert_equal '5', state1.array_nl state2 = JSON.state.new state2.configure(numbered_state) assert_equal '1', state2.indent assert_equal '2', state2.space assert_equal '3', state2.space_before assert_equal '4', state2.object_nl assert_equal '5', state2.array_nl end def test_configure_hash_conversion state = JSON.state.new state.configure(:indent => '1') assert_equal '1', 
state.indent state = JSON.state.new foo = 'foo' assert_raise(TypeError) do state.configure(foo) end def foo.to_h { :indent => '2' } end state.configure(foo) assert_equal '2', state.indent end if defined?(JSON::Ext::Generator) def test_broken_bignum # [ruby-core:38867] pid = fork do x = 1 << 64 x.class.class_eval do def to_s end end begin JSON::Ext::Generator::State.new.generate(x) exit 1 rescue TypeError exit 0 end end _, status = Process.waitpid2(pid) assert status.success? rescue NotImplementedError # forking to avoid modifying core class of a parent process and # introducing race conditions of tests are run in parallel end end def test_hash_likeness_set_symbol state = JSON.state.new assert_equal nil, state[:foo] assert_equal nil.class, state[:foo].class assert_equal nil, state['foo'] state[:foo] = :bar assert_equal :bar, state[:foo] assert_equal :bar, state['foo'] state_hash = state.to_hash assert_kind_of Hash, state_hash assert_equal :bar, state_hash[:foo] end def test_hash_likeness_set_string state = JSON.state.new assert_equal nil, state[:foo] assert_equal nil, state['foo'] state['foo'] = :bar assert_equal :bar, state[:foo] assert_equal :bar, state['foo'] state_hash = state.to_hash assert_kind_of Hash, state_hash assert_equal :bar, state_hash[:foo] end def test_json_generate assert_raise JSON::GeneratorError do assert_equal true, generate(["\xea"]) end end def test_nesting too_deep = '[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]' too_deep_ary = eval too_deep assert_raise(JSON::NestingError) { generate too_deep_ary } assert_raise(JSON::NestingError) { generate too_deep_ary, :max_nesting => 100 } ok = generate too_deep_ary, :max_nesting => 101 assert_equal too_deep, ok ok = generate too_deep_ary, :max_nesting => nil assert_equal too_deep, ok ok = generate too_deep_ary, :max_nesting => false 
assert_equal too_deep, ok ok = generate too_deep_ary, :max_nesting => 0 assert_equal too_deep, ok end def test_backslash data = [ '\\.(?i:gif|jpe?g|png)$' ] json = '["\\\\.(?i:gif|jpe?g|png)$"]' assert_equal json, generate(data) # data = [ '\\"' ] json = '["\\\\\""]' assert_equal json, generate(data) # data = [ '/' ] json = '["/"]' assert_equal json, generate(data) # data = ['"'] json = '["\""]' assert_equal json, generate(data) # data = ["'"] json = '["\\\'"]' assert_equal '["\'"]', generate(data) end def test_string_subclass s = Class.new(String) do def to_s; self; end undef to_json end assert_nothing_raised(SystemStackError) do assert_equal '["foo"]', JSON.generate([s.new('foo')]) end end end ruby-json-2.1.0+dfsg.orig/.travis.yml0000644000175000017500000000060313113111601016772 0ustar boutilboutil# Passes arguments to bundle install (http://gembundler.com/man/bundle-install.1.html) #bundler_args: --binstubs language: ruby # Specify which ruby versions you wish to run your tests on, each version will be used rvm: - 1.9.3 - 2.0.0 - 2.1 - 2.2 - 2.3.3 - 2.4.1 - jruby - ruby-head matrix: allow_failures: - rvm: ruby-head script: "bundle exec rake" sudo: false ruby-json-2.1.0+dfsg.orig/README.md0000644000175000017500000002717413113111601016154 0ustar boutilboutil# JSON implementation for Ruby ![Travis Widget] [Travis Widget]: http://travis-ci.org/flori/json.svg?branch=master ## Description This is a implementation of the JSON specification according to RFC 7159 http://www.ietf.org/rfc/rfc7159.txt . Starting from version 1.0.0 on there will be two variants available: * A pure ruby variant, that relies on the iconv and the stringscan extensions, which are both part of the ruby standard library. * The quite a bit faster native extension variant, which is in parts implemented in C or Java and comes with its own unicode conversion functions and a parser generated by the ragel state machine compiler http://www.complang.org/ragel/ . 
Both variants of the JSON generator generate UTF-8 character sequences by default. If an :ascii\_only option with a true value is given, they escape all non-ASCII and control characters with \uXXXX escape sequences, and support UTF-16 surrogate pairs in order to be able to generate the whole range of unicode code points. All strings, that are to be encoded as JSON strings, should be UTF-8 byte sequences on the Ruby side. To encode raw binary strings, that aren't UTF-8 encoded, please use the to\_json\_raw\_object method of String (which produces an object, that contains a byte array) and decode the result on the receiving endpoint. ## Installation It's recommended to use the extension variant of JSON, because it's faster than the pure ruby variant. If you cannot build it on your system, you can settle for the latter. Just type into the command line as root: ``` # rake install ``` The above command will build the extensions and install them on your system. ``` # rake install_pure ``` or ``` # ruby install.rb ``` will just install the pure ruby implementation of JSON. If you use Rubygems you can type ``` # gem install json ``` instead, to install the newest JSON version. There is also a pure ruby json only variant of the gem, that can be installed with: ``` # gem install json_pure ``` ## Compiling the extensions yourself If you want to create the `parser.c` file from its `parser.rl` file or draw nice graphviz images of the state machines, you need ragel from: http://www.complang.org/ragel/ ## Usage To use JSON you can ```ruby require 'json' ``` to load the installed variant (either the extension `'json'` or the pure variant `'json_pure'`). 
If you have installed the extension variant, you can pick either the extension variant or the pure variant by typing ```ruby require 'json/ext' ``` or ```ruby require 'json/pure' ``` Now you can parse a JSON document into a ruby data structure by calling ```ruby JSON.parse(document) ``` If you want to generate a JSON document from a ruby data structure call ```ruby JSON.generate(data) ``` You can also use the `pretty_generate` method (which formats the output more verbosely and nicely) or `fast_generate` (which doesn't do any of the security checks generate performs, e. g. nesting deepness checks). There are also the JSON and JSON[] methods which use parse on a String or generate a JSON document from an array or hash: ```ruby document = JSON 'test' => 23 # => "{\"test\":23}" document = JSON['test' => 23] # => "{\"test\":23}" ``` and ```ruby data = JSON '{"test":23}' # => {"test"=>23} data = JSON['{"test":23}'] # => {"test"=>23} ``` You can choose to load a set of common additions to ruby core's objects if you ```ruby require 'json/add/core' ``` After requiring this you can, e. g., serialise/deserialise Ruby ranges: ```ruby JSON JSON(1..10) # => 1..10 ``` To find out how to add JSON support to other or your own classes, read the section "More Examples" below. To get the best compatibility to rails' JSON implementation, you can ```ruby require 'json/add/rails' ``` Both of the additions attempt to require `'json'` (like above) first, if it has not been required yet. ## More Examples To create a JSON document from a ruby data structure, you can call `JSON.generate` like that: ```ruby json = JSON.generate [1, 2, {"a"=>3.141}, false, true, nil, 4..10] # => "[1,2,{\"a\":3.141},false,true,null,\"4..10\"]" ``` To get back a ruby data structure from a JSON document, you have to call JSON.parse on it: ```ruby JSON.parse json # => [1, 2, {"a"=>3.141}, false, true, nil, "4..10"] ``` Note, that the range from the original data structure is a simple string now. 
The reason for this is, that JSON doesn't support ranges or arbitrary classes. In this case the json library falls back to call `Object#to_json`, which is the same as `#to_s.to_json`. It's possible to add JSON support serialization to arbitrary classes by simply implementing a more specialized version of the `#to_json method`, that should return a JSON object (a hash converted to JSON with `#to_json`) like this (don't forget the `*a` for all the arguments): ```ruby class Range def to_json(*a) { 'json_class' => self.class.name, # = 'Range' 'data' => [ first, last, exclude_end? ] }.to_json(*a) end end ``` The hash key `json_class` is the class, that will be asked to deserialise the JSON representation later. In this case it's `Range`, but any namespace of the form `A::B` or `::A::B` will do. All other keys are arbitrary and can be used to store the necessary data to configure the object to be deserialised. If a the key `json_class` is found in a JSON object, the JSON parser checks if the given class responds to the `json_create` class method. If so, it is called with the JSON object converted to a Ruby hash. So a range can be deserialised by implementing `Range.json_create` like this: ```ruby class Range def self.json_create(o) new(*o['data']) end end ``` Now it possible to serialise/deserialise ranges as well: ```ruby json = JSON.generate [1, 2, {"a"=>3.141}, false, true, nil, 4..10] # => "[1,2,{\"a\":3.141},false,true,null,{\"json_class\":\"Range\",\"data\":[4,10,false]}]" JSON.parse json # => [1, 2, {"a"=>3.141}, false, true, nil, 4..10] ``` `JSON.generate` always creates the shortest possible string representation of a ruby data structure in one line. This is good for data storage or network protocols, but not so good for humans to read. 
Fortunately there's also `JSON.pretty_generate` (or `JSON.pretty_generate`) that creates a more readable output: ```ruby puts JSON.pretty_generate([1, 2, {"a"=>3.141}, false, true, nil, 4..10]) [ 1, 2, { "a": 3.141 }, false, true, null, { "json_class": "Range", "data": [ 4, 10, false ] } ] ``` There are also the methods `Kernel#j` for generate, and `Kernel#jj` for `pretty_generate` output to the console, that work analogous to Core Ruby's `p` and the `pp` library's `pp` methods. The script `tools/server.rb` contains a small example if you want to test, how receiving a JSON object from a webrick server in your browser with the javasript prototype library http://www.prototypejs.org works. ## Speed Comparisons I have created some benchmark results (see the benchmarks/data-p4-3Ghz subdir of the package) for the JSON-parser to estimate the speed up in the C extension: ``` Comparing times (call_time_mean): 1 ParserBenchmarkExt#parser 900 repeats: 553.922304770 ( real) -> 21.500x 0.001805307 2 ParserBenchmarkYAML#parser 1000 repeats: 224.513358139 ( real) -> 8.714x 0.004454078 3 ParserBenchmarkPure#parser 1000 repeats: 26.755020642 ( real) -> 1.038x 0.037376163 4 ParserBenchmarkRails#parser 1000 repeats: 25.763381731 ( real) -> 1.000x 0.038814780 calls/sec ( time) -> speed covers secs/call ``` In the table above 1 is `JSON::Ext::Parser`, 2 is `YAML.load` with YAML compatbile JSON document, 3 is is `JSON::Pure::Parser`, and 4 is `ActiveSupport::JSON.decode`. The ActiveSupport JSON-decoder converts the input first to YAML and then uses the YAML-parser, the conversion seems to slow it down so much that it is only as fast as the `JSON::Pure::Parser`! 
If you look at the benchmark data you can see that this is mostly caused by the frequent high outliers - the median of the Rails-parser runs is still overall smaller than the median of the `JSON::Pure::Parser` runs: ``` Comparing times (call_time_median): 1 ParserBenchmarkExt#parser 900 repeats: 800.592479481 ( real) -> 26.936x 0.001249075 2 ParserBenchmarkYAML#parser 1000 repeats: 271.002390644 ( real) -> 9.118x 0.003690004 3 ParserBenchmarkRails#parser 1000 repeats: 30.227910865 ( real) -> 1.017x 0.033082008 4 ParserBenchmarkPure#parser 1000 repeats: 29.722384421 ( real) -> 1.000x 0.033644676 calls/sec ( time) -> speed covers secs/call ``` I have benchmarked the `JSON-Generator` as well. This generated a few more values, because there are different modes that also influence the achieved speed: ``` Comparing times (call_time_mean): 1 GeneratorBenchmarkExt#generator_fast 1000 repeats: 547.354332608 ( real) -> 15.090x 0.001826970 2 GeneratorBenchmarkExt#generator_safe 1000 repeats: 443.968212317 ( real) -> 12.240x 0.002252414 3 GeneratorBenchmarkExt#generator_pretty 900 repeats: 375.104545883 ( real) -> 10.341x 0.002665923 4 GeneratorBenchmarkPure#generator_fast 1000 repeats: 49.978706968 ( real) -> 1.378x 0.020008521 5 GeneratorBenchmarkRails#generator 1000 repeats: 38.531868759 ( real) -> 1.062x 0.025952543 6 GeneratorBenchmarkPure#generator_safe 1000 repeats: 36.927649925 ( real) -> 1.018x 7 (>=3859) 0.027079979 7 GeneratorBenchmarkPure#generator_pretty 1000 repeats: 36.272134441 ( real) -> 1.000x 6 (>=3859) 0.027569373 calls/sec ( time) -> speed covers secs/call ``` In the table above 1-3 are `JSON::Ext::Generator` methods. 4, 6, and 7 are `JSON::Pure::Generator` methods and 5 is the Rails JSON generator. It is now a bit faster than the `generator_safe` and `generator_pretty` methods of the pure variant but slower than the others. To achieve the fastest JSON document output, you can use the `fast_generate` method. 
Beware, that this will disable the checking for circular Ruby data structures, which may cause JSON to go into an infinite loop. Here are the median comparisons for completeness' sake: ``` Comparing times (call_time_median): 1 GeneratorBenchmarkExt#generator_fast 1000 repeats: 708.258020939 ( real) -> 16.547x 0.001411915 2 GeneratorBenchmarkExt#generator_safe 1000 repeats: 569.105020353 ( real) -> 13.296x 0.001757145 3 GeneratorBenchmarkExt#generator_pretty 900 repeats: 482.825371244 ( real) -> 11.280x 0.002071142 4 GeneratorBenchmarkPure#generator_fast 1000 repeats: 62.717626652 ( real) -> 1.465x 0.015944481 5 GeneratorBenchmarkRails#generator 1000 repeats: 43.965681162 ( real) -> 1.027x 0.022745013 6 GeneratorBenchmarkPure#generator_safe 1000 repeats: 43.929073409 ( real) -> 1.026x 7 (>=3859) 0.022763968 7 GeneratorBenchmarkPure#generator_pretty 1000 repeats: 42.802514491 ( real) -> 1.000x 6 (>=3859) 0.023363113 calls/sec ( time) -> speed covers secs/call ``` ## Author Florian Frank ## License Ruby License, see https://www.ruby-lang.org/en/about/license.txt. 
## Download The latest version of this library can be downloaded at * https://rubygems.org/gems/json Online Documentation should be located at * http://json.rubyforge.org ruby-json-2.1.0+dfsg.orig/Gemfile0000644000175000017500000000040213113111601016151 0ustar boutilboutil# vim: set ft=ruby: source 'https://rubygems.org' case ENV['JSON'] when 'ext', nil if ENV['RUBY_ENGINE'] == 'jruby' gemspec :name => 'json-java' else gemspec :name => 'json' end when 'pure' gemspec :name => 'json_pure' end gem 'simplecov' ruby-json-2.1.0+dfsg.orig/VERSION0000644000175000017500000000000613113111601015726 0ustar boutilboutil2.1.0 ruby-json-2.1.0+dfsg.orig/references/0000755000175000017500000000000013127544523017025 5ustar boutilboutilruby-json-2.1.0+dfsg.orig/json.gemspec0000644000175000017500000001254213113111601017204 0ustar boutilboutil# -*- encoding: utf-8 -*- # stub: json 2.1.0 ruby lib # stub: ext/json/ext/generator/extconf.rbext/json/ext/parser/extconf.rbext/json/extconf.rb Gem::Specification.new do |s| s.name = "json".freeze s.version = "2.1.0" s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? 
:required_rubygems_version= s.require_paths = ["lib".freeze] s.authors = ["Florian Frank".freeze] s.date = "2017-04-18" s.description = "This is a JSON implementation as a Ruby extension in C.".freeze s.email = "flori@ping.de".freeze s.extensions = ["ext/json/ext/generator/extconf.rb".freeze, "ext/json/ext/parser/extconf.rb".freeze, "ext/json/extconf.rb".freeze] s.extra_rdoc_files = ["README.md".freeze] s.files = ["./tests/test_helper.rb".freeze, ".gitignore".freeze, ".travis.yml".freeze, "CHANGES.md".freeze, "Gemfile".freeze, "README-json-jruby.md".freeze, "README.md".freeze, "Rakefile".freeze, "VERSION".freeze, "data/example.json".freeze, "data/index.html".freeze, "data/prototype.js".freeze, "diagrams/.keep".freeze, "ext/json/ext/fbuffer/fbuffer.h".freeze, "ext/json/ext/generator/depend".freeze, "ext/json/ext/generator/extconf.rb".freeze, "ext/json/ext/generator/generator.c".freeze, "ext/json/ext/generator/generator.h".freeze, "ext/json/ext/parser/depend".freeze, "ext/json/ext/parser/extconf.rb".freeze, "ext/json/ext/parser/parser.c".freeze, "ext/json/ext/parser/parser.h".freeze, "ext/json/ext/parser/parser.rl".freeze, "ext/json/extconf.rb".freeze, "install.rb".freeze, "java/src/json/ext/ByteListTranscoder.java".freeze, "java/src/json/ext/Generator.java".freeze, "java/src/json/ext/GeneratorMethods.java".freeze, "java/src/json/ext/GeneratorService.java".freeze, "java/src/json/ext/GeneratorState.java".freeze, "java/src/json/ext/OptionsReader.java".freeze, "java/src/json/ext/Parser.java".freeze, "java/src/json/ext/Parser.rl".freeze, "java/src/json/ext/ParserService.java".freeze, "java/src/json/ext/RuntimeInfo.java".freeze, "java/src/json/ext/StringDecoder.java".freeze, "java/src/json/ext/StringEncoder.java".freeze, "java/src/json/ext/Utils.java".freeze, "json-java.gemspec".freeze, "json.gemspec".freeze, "json_pure.gemspec".freeze, "lib/json.rb".freeze, "lib/json/add/bigdecimal.rb".freeze, "lib/json/add/complex.rb".freeze, "lib/json/add/core.rb".freeze, 
"lib/json/add/date.rb".freeze, "lib/json/add/date_time.rb".freeze, "lib/json/add/exception.rb".freeze, "lib/json/add/ostruct.rb".freeze, "lib/json/add/range.rb".freeze, "lib/json/add/rational.rb".freeze, "lib/json/add/regexp.rb".freeze, "lib/json/add/struct.rb".freeze, "lib/json/add/symbol.rb".freeze, "lib/json/add/time.rb".freeze, "lib/json/common.rb".freeze, "lib/json/ext.rb".freeze, "lib/json/ext/.keep".freeze, "lib/json/generic_object.rb".freeze, "lib/json/pure.rb".freeze, "lib/json/pure/generator.rb".freeze, "lib/json/pure/parser.rb".freeze, "lib/json/version.rb".freeze, "references/rfc7159.txt".freeze, "tests/fixtures/fail10.json".freeze, "tests/fixtures/fail11.json".freeze, "tests/fixtures/fail12.json".freeze, "tests/fixtures/fail13.json".freeze, "tests/fixtures/fail14.json".freeze, "tests/fixtures/fail18.json".freeze, "tests/fixtures/fail19.json".freeze, "tests/fixtures/fail2.json".freeze, "tests/fixtures/fail20.json".freeze, "tests/fixtures/fail21.json".freeze, "tests/fixtures/fail22.json".freeze, "tests/fixtures/fail23.json".freeze, "tests/fixtures/fail24.json".freeze, "tests/fixtures/fail25.json".freeze, "tests/fixtures/fail27.json".freeze, "tests/fixtures/fail28.json".freeze, "tests/fixtures/fail3.json".freeze, "tests/fixtures/fail4.json".freeze, "tests/fixtures/fail5.json".freeze, "tests/fixtures/fail6.json".freeze, "tests/fixtures/fail7.json".freeze, "tests/fixtures/fail8.json".freeze, "tests/fixtures/fail9.json".freeze, "tests/fixtures/obsolete_fail1.json".freeze, "tests/fixtures/pass1.json".freeze, "tests/fixtures/pass15.json".freeze, "tests/fixtures/pass16.json".freeze, "tests/fixtures/pass17.json".freeze, "tests/fixtures/pass2.json".freeze, "tests/fixtures/pass26.json".freeze, "tests/fixtures/pass3.json".freeze, "tests/json_addition_test.rb".freeze, "tests/json_common_interface_test.rb".freeze, "tests/json_encoding_test.rb".freeze, "tests/json_ext_parser_test.rb".freeze, "tests/json_fixtures_test.rb".freeze, "tests/json_generator_test.rb".freeze, 
"tests/json_generic_object_test.rb".freeze, "tests/json_parser_test.rb".freeze, "tests/json_string_matching_test.rb".freeze, "tests/test_helper.rb".freeze, "tools/diff.sh".freeze, "tools/fuzz.rb".freeze, "tools/server.rb".freeze] s.homepage = "http://flori.github.com/json".freeze s.licenses = ["Ruby".freeze] s.rdoc_options = ["--title".freeze, "JSON implemention for Ruby".freeze, "--main".freeze, "README.md".freeze] s.required_ruby_version = Gem::Requirement.new(">= 1.9".freeze) s.rubygems_version = "2.6.11".freeze s.summary = "JSON Implementation for Ruby".freeze s.test_files = ["./tests/test_helper.rb".freeze] if s.respond_to? :specification_version then s.specification_version = 4 if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then s.add_development_dependency(%q.freeze, [">= 0"]) s.add_development_dependency(%q.freeze, ["~> 2.0"]) else s.add_dependency(%q.freeze, [">= 0"]) s.add_dependency(%q.freeze, ["~> 2.0"]) end else s.add_dependency(%q.freeze, [">= 0"]) s.add_dependency(%q.freeze, ["~> 2.0"]) end end ruby-json-2.1.0+dfsg.orig/json-java.gemspec0000644000175000017500000000201213113111601020112 0ustar boutilboutil#!/usr/bin/env jruby require "rubygems" spec = Gem::Specification.new do |s| s.name = "json" s.version = File.read("VERSION").chomp s.summary = "JSON implementation for JRuby" s.description = "A JSON implementation as a JRuby extension." s.author = "Daniel Luz" s.email = "dev+ruby@mernen.com" s.homepage = "http://json-jruby.rubyforge.org/" s.platform = 'java' s.rubyforge_project = "json-jruby" s.licenses = ["Ruby"] s.files = Dir["{docs,lib,tests}/**/*"] if s.respond_to? 
:specification_version then s.specification_version = 4 if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then s.add_development_dependency(%q, [">= 0"]) s.add_development_dependency(%q, ["~> 2.0"]) else s.add_dependency(%q, [">= 0"]) s.add_dependency(%q, ["~> 2.0"]) end else s.add_dependency(%q, [">= 0"]) s.add_dependency(%q, ["~> 2.0"]) end end if $0 == __FILE__ Gem::Builder.new(spec).build else spec end ruby-json-2.1.0+dfsg.orig/.gitignore0000644000175000017500000000023613113111601016653 0ustar boutilboutil.*.sw[pon] *.bundle coverage tags pkg .nfs.* .idea java/Json.iml Gemfile.lock .rvmrc *.rbc .rbx .AppleDouble .DS_Store */**/Makefile */**/*.o .byebug_history ruby-json-2.1.0+dfsg.orig/tools/0000755000175000017500000000000013113111601016022 5ustar boutilboutilruby-json-2.1.0+dfsg.orig/tools/fuzz.rb0000755000175000017500000000536213113111601017356 0ustar boutilboutilrequire 'json' class Fuzzer def initialize(n, freqs = {}) sum = freqs.inject(0.0) { |s, x| s + x.last } freqs.each_key { |x| freqs[x] /= sum } s = 0.0 freqs.each_key do |x| freqs[x] = s .. (s + t = freqs[x]) s += t end @freqs = freqs @n = n @alpha = (0..0xff).to_a end def random_string s = '' 30.times { s << @alpha[rand(@alpha.size)] } s end def pick r = rand found = @freqs.find { |k, f| f.include? 
rand } found && found.first end def make_pick k = pick case when k == Hash, k == Array k.new when k == true, k == false, k == nil k when k == String random_string when k == Fixnum rand(2 ** 30) - 2 ** 29 when k == Bignum rand(2 ** 70) - 2 ** 69 end end def fuzz(current = nil) if @n > 0 case current when nil @n -= 1 current = fuzz [ Hash, Array ][rand(2)].new when Array while @n > 0 @n -= 1 current << case p = make_pick when Array, Hash fuzz(p) else p end end when Hash while @n > 0 @n -= 1 current[random_string] = case p = make_pick when Array, Hash fuzz(p) else p end end end end current end end class MyState < JSON.state WS = " \r\t\n" def initialize super( :indent => make_spaces, :space => make_spaces, :space_before => make_spaces, :object_nl => make_spaces, :array_nl => make_spaces, :max_nesting => false ) end def make_spaces s = '' rand(1).times { s << WS[rand(WS.size)] } s end end n = (ARGV.shift || 500).to_i loop do fuzzer = Fuzzer.new(n, Hash => 25, Array => 25, String => 10, Fixnum => 10, Bignum => 10, nil => 5, true => 5, false => 5 ) o1 = fuzzer.fuzz json = JSON.generate o1, MyState.new if $DEBUG puts "-" * 80 puts json, json.size else puts json.size end begin o2 = JSON.parse(json, :max_nesting => false) rescue JSON::ParserError => e puts "Caught #{e.class}: #{e.message}\n#{e.backtrace * "\n"}" puts "o1 = #{o1.inspect}", "json = #{json}", "json_str = #{json.inspect}" puts "locals = #{local_variables.inspect}" exit end if o1 != o2 puts "mismatch", "o1 = #{o1.inspect}", "o2 = #{o2.inspect}", "json = #{json}", "json_str = #{json.inspect}" puts "locals = #{local_variables.inspect}" end end ruby-json-2.1.0+dfsg.orig/tools/server.rb0000755000175000017500000000260413113111601017662 0ustar boutilboutil#!/usr/bin/env ruby # encoding: utf-8 require 'webrick' include WEBrick $:.unshift 'ext' $:.unshift 'lib' require 'json' class JSONServlet < HTTPServlet::AbstractServlet @@count = 1 def do_GET(req, res) obj = { "TIME" => Time.now.strftime("%FT%T"), "foo" => "Bär", 
"bar" => "© ≠ €!", 'a' => 2, 'b' => 3.141, 'COUNT' => @@count += 1, 'c' => 'c', 'd' => [ 1, "b", 3.14 ], 'e' => { 'foo' => 'bar' }, 'g' => "松本行弘", 'h' => 1000.0, 'i' => 0.001, 'j' => "\xf0\xa0\x80\x81", } res.body = JSON.generate obj res['Content-Type'] = "application/json" end end def create_server(err, dir, port) dir = File.expand_path(dir) err.puts "Surf to:", "http://#{Socket.gethostname}:#{port}" s = HTTPServer.new( :Port => port, :DocumentRoot => dir, :Logger => WEBrick::Log.new(err), :AccessLog => [ [ err, WEBrick::AccessLog::COMMON_LOG_FORMAT ], [ err, WEBrick::AccessLog::REFERER_LOG_FORMAT ], [ err, WEBrick::AccessLog::AGENT_LOG_FORMAT ] ] ) s.mount("/json", JSONServlet) s end default_dir = File.expand_path(File.join(File.dirname(__FILE__), '..', 'data')) dir = ARGV.shift || default_dir port = (ARGV.shift || 6666).to_i s = create_server(STDERR, dir, 6666) t = Thread.new { s.start } trap(:INT) do s.shutdown t.join exit end sleep ruby-json-2.1.0+dfsg.orig/tools/diff.sh0000755000175000017500000000043713113111601017275 0ustar boutilboutil#!/bin/sh files=`find ext -name '*.[ch]' -o -name parser.rl` for f in $files do b=`basename $f` g=`find ../ruby/ext/json -name $b` d=`diff -u $f $g` test -z "$d" && continue echo "$d" read -p "Edit diff of $b? " a case $a in [yY]*) vimdiff $f $g ;; esac done