autoparse-0.3.3/0000755000175000017500000000000013121477516015015 5ustar balasankarcbalasankarcautoparse-0.3.3/CHANGELOG.md0000644000175000017500000000206213121477516016626 0ustar balasankarcbalasankarc# 0.3.3

* Fixed issue with parent class redefinition

# 0.3.2

* Fixed issue with booleans and default values
* Fixed issue with class redefinition
* Updated Launchy dependency

# 0.3.1

* Replaced json gem dependency with multi_json
* Fixed issue with certain JSON libraries breaking on automatic #to_json calls

# 0.3.0

* Fixed handling of additional properties w/ a set schema
* Modified index methods to allow either raw or parsed access
* Modified index methods to default to parsed access

# 0.2.3

* Fixed stupid bug in inspect method

# 0.2.2

* The AutoParse.generate method was changed to use an options Hash
* Fixed some issues around array imports and exports
* Schemas of type object should now correctly inherit their id URI values

# 0.2.1

* Fixed URI resolution when base URI is missing

# 0.2.0

* Added support for union types
* Added support for recursive references
* Axed vestigial code from refactoring extraction
* Fixed issue with references when schema URI is not supplied
* Fixed issue with missing gem dependencies

# 0.1.0

* Initial release
autoparse-0.3.3/Gemfile0000644000175000017500000000050213121477516016305 0ustar balasankarcbalasankarcsource :rubygems

gem 'addressable', '>= 2.3.1'
gem 'multi_json', '>= 1.0.0'
gem 'extlib', '>= 0.9.15'

group :development do
  gem 'launchy', '>= 2.1.1'
  gem 'yard'
  gem 'redcarpet'
end

group :test, :development do
  gem 'rake', '>= 0.9.0'
  gem 'rspec', '>= 2.11.0'
  gem 'rcov', '>= 0.9.9', :platform => :mri_18
end
autoparse-0.3.3/README.md0000644000175000017500000000106013121477516016271 0ustar balasankarcbalasankarc# AutoParse

<dl>
Homepage
http://code.google.com/p/ruby-autoparse/
Author
Bob Aman
Copyright
Copyright © 2010 Google, Inc.
License
Apache 2.0
# Description An implementation of the JSON Schema specification. Provides automatic parsing for a given JSON Schema. # Requirements * Either extlib or activesupport is required for inflection. # Install * sudo gem install autoparse autoparse-0.3.3/spec/0000755000175000017500000000000013121477516015747 5ustar balasankarcbalasankarcautoparse-0.3.3/spec/data/0000755000175000017500000000000013121477516016660 5ustar balasankarcbalasankarcautoparse-0.3.3/spec/data/links.json0000644000175000017500000000136613121477516020701 0ustar balasankarcbalasankarc{ "$schema" : "http://json-schema.org/draft-03/hyper-schema#", "id" : "http://json-schema.org/draft-03/links#", "type" : "object", "properties" : { "href" : { "type" : "string", "required" : true, "format" : "link-description-object-template" }, "rel" : { "type" : "string", "required" : true }, "targetSchema" : {"$ref" : "http://json-schema.org/draft-03/hyper-schema#"}, "method" : { "type" : "string", "default" : "GET" }, "enctype" : { "type" : "string", "requires" : "method" }, "properties" : { "type" : "object", "additionalProperties" : {"$ref" : "http://json-schema.org/draft-03/hyper-schema#"} } } } autoparse-0.3.3/spec/data/schema.json0000644000175000017500000000605213121477516021016 0ustar balasankarcbalasankarc{ "$schema" : "http://json-schema.org/draft-03/schema#", "id" : "http://json-schema.org/draft-03/schema#", "type" : "object", "properties" : { "type" : { "type" : ["string", "array"], "items" : { "type" : ["string", {"$ref" : "#"}] }, "uniqueItems" : true, "default" : "any" }, "properties" : { "type" : "object", "additionalProperties" : {"$ref" : "#"}, "default" : {} }, "patternProperties" : { "type" : "object", "additionalProperties" : {"$ref" : "#"}, "default" : {} }, "additionalProperties" : { "type" : [{"$ref" : "#"}, "boolean"], "default" : {} }, "items" : { "type" : [{"$ref" : "#"}, "array"], "items" : {"$ref" : "#"}, "default" : {} }, "additionalItems" : { "type" : [{"$ref" : "#"}, "boolean"], "default" : {} }, "required" : { "type" : "boolean", "default" : false }, "dependencies" : { "type" : "object", "additionalProperties" : { "type" : ["string", "array", {"$ref" : "#"}], "items" : { "type" : "string" } }, "default" : {} }, "minimum" : { "type" : "number" }, "maximum" : { "type" : "number" }, "exclusiveMinimum" : { "type" : "boolean", "default" : false }, "exclusiveMaximum" : { "type" : "boolean", "default" : false }, "minItems" : { "type" : "integer", "minimum" : 0, "default" : 0 }, "maxItems" : { "type" : "integer", "minimum" : 0 }, "uniqueItems" : { "type" : "boolean", "default" : false }, "pattern" : { "type" : "string", "format" : "regex" }, "minLength" : { "type" : "integer", "minimum" : 0, "default" : 0 }, "maxLength" : { "type" : "integer" }, "enum" : { "type" : "array", "minItems" : 1, "uniqueItems" : true }, "default" : { "type" : "any" }, "title" : { "type" : "string" }, "description" : { "type" : "string" }, "format" : { "type" : "string" }, "divisibleBy" : { "type" : "number", "minimum" : 0, "exclusiveMinimum" : true, "default" : 1 }, "disallow" : { "type" : ["string", "array"], "items" : { "type" : ["string", {"$ref" : "#"}] }, "uniqueItems" : true }, "extends" : { "type" : [{"$ref" : "#"}, "array"], "items" : {"$ref" : "#"}, "default" : {} }, "id" : { "type" : "string", "format" : "uri" }, "$ref" : { "type" : "string", "format" : "uri" }, "$schema" : { "type" : "string", "format" : "uri" } }, "dependencies" : { "exclusiveMinimum" : "minimum", "exclusiveMaximum" : "maximum" }, "default" : {} } 
autoparse-0.3.3/spec/data/adult.json0000644000175000017500000000014213121477516020661 0ustar balasankarcbalasankarc{ "description":"An adult", "properties":{"age":{"minimum": 21}}, "extends":"person.json" } autoparse-0.3.3/spec/data/interfaces.json0000644000175000017500000000137113121477516021700 0ustar balasankarcbalasankarc{ "extends":"http://json-schema.org/hyper-schema", "description":"A schema for schema interface definitions that describe programmatic class structures using JSON schema syntax", "properties":{ "methods":{ "type":"object", "description":"This defines the set of methods available to the class instances", "additionalProperties":{ "type":"object", "description":"The definition of the method", "properties":{ "parameters":{ "type":"array", "description":"The set of parameters that should be passed to the method when it is called", "items":{"$ref":"#"}, "required": true }, "returns":{"$ref":"#"} } } } } } autoparse-0.3.3/spec/data/address.json0000644000175000017500000000130013121477516021172 0ustar balasankarcbalasankarc{ "description" : "An Address following the convention of http://microformats.org/wiki/hcard", "type" : "object", "properties" : { "post-office-box" : { "type" : "string" }, "extended-address" : { "type" : "string" }, "street-address" : { "type":"string" }, "locality" : { "type" : "string", "required" : true }, "region" : { "type" : "string", "required" : true }, "postal-code" : { "type" : "string" }, "country-name" : { "type" : "string", "required" : true } }, "dependencies" : { "post-office-box" : "street-address", "extended-address" : "street-address", "street-address" : "region", "locality" : "region", "region" : "country-name" } } autoparse-0.3.3/spec/data/json-ref.json0000644000175000017500000000072113121477516021276 0ustar balasankarcbalasankarc{ "$schema" : "http://json-schema.org/draft-03/hyper-schema#", "id" : "http://json-schema.org/draft-03/json-ref#", "additionalItems" : {"$ref" : "#"}, "additionalProperties" : {"$ref" : "#"}, "links" : [ { "href" : "{id}", "rel" : "self" }, { "href" : "{$ref}", "rel" : "full" }, { "href" : "{$schema}", "rel" : "describedby" } ], "fragmentResolution" : "dot-delimited" } autoparse-0.3.3/spec/data/calendar.json0000644000175000017500000000177213121477516021333 0ustar balasankarcbalasankarc{ "description" : "A representation of an event", "type" : "object", "properties" : { "dtstart" : { "format" : "date-time", "type" : "string", "description" : "Event starting time", "required":true }, "summary" : { "type":"string", "required":true }, "location" : { "type" : "string" }, "url" : { "type" : "string", "format" : "url" }, "dtend" : { "format" : "date-time", "type" : "string", "description" : "Event ending time" }, "duration" : { "format" : "date", "type" : "string", "description" : "Event duration" }, "rdate" : { "format" : "date-time", "type" : "string", "description" : "Recurrence date" }, "rrule" : { "type" : "string", "description" : "Recurrence rule" }, "category" : { "type" : "string" }, "description" : { "type" : "string" }, "geo" : { "$ref" : "geo.json" } } } autoparse-0.3.3/spec/data/person.json0000644000175000017500000000024513121477516021062 0ustar balasankarcbalasankarc{ "description":"A person", "type":"object", "properties":{ "name":{"type":"string"}, "age" :{ "type":"integer", "maximum":125 } } } autoparse-0.3.3/spec/data/file.json0000644000175000017500000001474113121477516020501 0ustar balasankarcbalasankarc{ "id": "File", "type": "object", "description": "The metadata for a file.", "properties": { 
"alternateLink": { "type": "string", "description": "A link for opening the file in a browser." }, "createdDate": { "type": "string", "description": "Create time for this file (formatted ISO8601 timestamp).", "format": "date-time" }, "description": { "type": "string", "description": "A short description of the file." }, "downloadUrl": { "type": "string", "description": "Short term download URL for the file. This will only be populated on files with content stored in Drive." }, "editable": { "type": "boolean", "description": "Whether the file can be edited by the current user." }, "embedLink": { "type": "string", "description": "A link for embedding the file." }, "etag": { "type": "string", "description": "ETag of the file." }, "exportLinks": { "type": "object", "description": "Links for exporting Google Docs to specific formats.", "additionalProperties": { "type": "string", "description": "A mapping from export format to URL" } }, "fileExtension": { "type": "string", "description": "The file extension used when downloading this file. This field is read only. To set the extension, include it on title when creating the file. This will only be populated on files with content stored in Drive." }, "fileSize": { "type": "string", "description": "The size of the file in bytes. This will only be populated on files with content stored in Drive.", "format": "int64" }, "id": { "type": "string", "description": "The id of the file." }, "indexableText": { "type": "object", "description": "Indexable text attributes for the file (can only be written)", "properties": { "text": { "type": "string", "description": "The text to be indexed for this file" } } }, "kind": { "type": "string", "description": "The type of file. This is always drive#file.", "default": "drive#file" }, "labels": { "type": "object", "description": "A group of labels for the file.", "properties": { "hidden": { "type": "boolean", "description": "Whether this file is hidden from the user." }, "restricted": { "type": "boolean", "description": "Whether viewers are prevented from downloading this file." }, "starred": { "type": "boolean", "description": "Whether this file is starred by the user." }, "trashed": { "type": "boolean", "description": "Whether this file has been trashed." }, "viewed": { "type": "boolean", "description": "Whether this file has been viewed by this user." } } }, "lastModifyingUserName": { "type": "string", "description": "Name of the last user to modify this file. This will only be populated if a user has edited this file." }, "lastViewedByMeDate": { "type": "string", "description": "Last time this file was viewed by the user (formatted RFC 3339 timestamp).", "format": "date-time" }, "md5Checksum": { "type": "string", "description": "An MD5 checksum for the content of this file. This will only be populated on files with content stored in Drive." }, "mimeType": { "type": "string", "description": "The MIME type of the file." }, "modifiedByMeDate": { "type": "string", "description": "Last time this file was modified by the user (formatted RFC 3339 timestamp).", "format": "date-time" }, "modifiedDate": { "type": "string", "description": "Last time this file was modified by anyone (formatted RFC 3339 timestamp).", "format": "date-time" }, "originalFilename": { "type": "string", "description": "The filename when uploading this file. This will only be populated on files with content stored in Drive." 
}, "ownerNames": { "type": "array", "description": "Name(s) of the owner(s) of this file.", "items": { "type": "string" } }, "parents": { "type": "array", "description": "Collection of parent folders which contain this file.\nSetting this field will put the file in all of the provided folders. On insert, if no folders are provided, the file will be placed in the default root folder.", "items": { "$ref": "#ParentReference" } }, "permissionsLink": { "type": "string", "description": "A link to the permissions collection." }, "quotaBytesUsed": { "type": "string", "description": "The number of quota bytes used by this file.", "format": "int64" }, "selfLink": { "type": "string", "description": "A link back to this file." }, "sharedWithMeDate": { "type": "string", "description": "Time at which this file was shared with the user (formatted RFC 3339 timestamp).", "format": "date-time" }, "thumbnailLink": { "type": "string", "description": "A link to the file's thumbnail." }, "title": { "type": "string", "description": "The title of this file." }, "writersCanShare": { "type": "boolean", "description": "Whether writers can share the document with other users." } } } autoparse-0.3.3/spec/data/hyper-schema.json0000644000175000017500000000207313121477516022142 0ustar balasankarcbalasankarc{ "$schema" : "http://json-schema.org/draft-03/hyper-schema#", "extends" : {"$ref" : "http://json-schema.org/draft-03/schema#"}, "id" : "http://json-schema.org/draft-03/hyper-schema#", "properties" : { "links" : { "type" : "array", "items" : {"$ref" : "http://json-schema.org/draft-03/links#"} }, "fragmentResolution" : { "type" : "string", "default" : "slash-delimited" }, "root" : { "type" : "boolean", "default" : false }, "readonly" : { "type" : "boolean", "default" : false }, "contentEncoding" : { "type" : "string" }, "pathStart" : { "type" : "string", "format" : "uri" }, "mediaType" : { "type" : "string", "format" : "media-type" } }, "links" : [ { "href" : "{id}", "rel" : "self" }, { "href" : "{$ref}", "rel" : "full" }, { "href" : "{$schema}", "rel" : "describedby" } ], "fragmentResolution" : "slash-delimited" } autoparse-0.3.3/spec/data/user-list.json0000644000175000017500000000031113121477516021475 0ustar balasankarcbalasankarc{ "description": "A mapping of users.", "type": "object", "properties": { "users": { "type": "object", "additionalProperties": { "$ref": "person.json" } } } } autoparse-0.3.3/spec/data/node.json0000644000175000017500000000032113121477516020474 0ustar balasankarcbalasankarc{ "description" : "A binary tree node", "type" : ["object", "null"], "properties" : { "left" : { "$ref" : "node.json" }, "right" : { "$ref" : "node.json" }, "value": { "type" : "any" } } } autoparse-0.3.3/spec/data/account.json0000644000175000017500000000023713121477516021211 0ustar balasankarcbalasankarc{ "description":"A bank account", "type":"object", "properties":{ "accountNumber":{"type":"string"}, "balance" :{"$ref" : "positive.json"} } } autoparse-0.3.3/spec/data/geo.json0000644000175000017500000000026013121477516020323 0ustar balasankarcbalasankarc{ "description" : "A geographical coordinate", "type" : "object", "properties" : { "latitude" : { "type" : "number" }, "longitude" : { "type" : "number" } } } autoparse-0.3.3/spec/data/booleans.json0000644000175000017500000000064113121477516021356 0ustar balasankarcbalasankarc{ "description":"A bunch of booleans", "type":"object", "properties": { "truthy": { "type": "boolean", "default": null }, "untrue": { "type": "boolean", "default": false }, "accurate": { "type": 
"boolean", "default": true }, "maybe": { "type": "boolean" }, "guaranteed": { "type": "boolean", "required": true } } } autoparse-0.3.3/spec/data/card.json0000644000175000017500000000345513121477516020473 0ustar balasankarcbalasankarc{ "description":"A representation of a person, company, organization, or place", "type":"object", "properties":{ "fn":{ "description":"Formatted Name", "type":"string" }, "familyName":{ "type":"string", "required":true }, "givenName":{ "type":"string", "required":true }, "additionalName":{ "type":"array", "items":{ "type":"string" } }, "honorificPrefix":{ "type":"array", "items":{ "type":"string" } }, "honorificSuffix":{ "type":"array", "items":{ "type":"string" } }, "nickname":{ "type":"string" }, "url":{ "type":"string", "format":"url" }, "email":{ "type":"object", "properties":{ "type":{ "type":"string" }, "value":{ "type":"string", "format":"email" } } }, "tel":{ "type":"object", "properties":{ "type":{ "type":"string" }, "value":{ "type":"string", "format":"phone" } } }, "adr":{"$ref" : "address.json"}, "geo":{"$ref" : "geo.json"}, "tz":{ "type":"string" }, "photo":{ "format":"image", "type":"string" }, "logo":{ "format":"image", "type":"string" }, "sound":{ "format":"attachment", "type":"string" }, "bday":{ "type":"string", "format":"date" }, "title":{ "type":"string" }, "role":{ "type":"string" }, "org":{ "type":"object", "properties":{ "organizationName":{ "type":"string" }, "organizationUnit":{ "type":"string" } } } } } autoparse-0.3.3/spec/data/parentref.json0000644000175000017500000000143013121477516021537 0ustar balasankarcbalasankarc{ "id": "ParentReference", "type": "object", "description": "A reference to a file's parent.", "properties": { "id": { "type": "string", "description": "The ID of the parent." }, "isRoot": { "type": "boolean", "description": "Whether or not the parent is the root folder." }, "kind": { "type": "string", "description": "This is always drive#parentReference.", "default": "drive#parentReference" }, "parentLink": { "type": "string", "description": "A link to the parent." }, "selfLink": { "type": "string", "description": "A link back to this reference." } } } autoparse-0.3.3/spec/data/filelist.json0000644000175000017500000000166613121477516021377 0ustar balasankarcbalasankarc{ "id": "FileList", "type": "object", "description": "A list of files.", "properties": { "etag": { "type": "string", "description": "The ETag of the list." }, "items": { "type": "array", "description": "The actual list of files.", "items": { "$ref": "#File" } }, "kind": { "type": "string", "description": "This is always drive#fileList.", "default": "drive#fileList" }, "nextLink": { "type": "string", "description": "A link to the next page of files." }, "nextPageToken": { "type": "string", "description": "The page token for the next page of files." }, "selfLink": { "type": "string", "description": "A link back to this list." 
} } } autoparse-0.3.3/spec/data/positive.json0000644000175000017500000000011513121477516021412 0ustar balasankarcbalasankarc{ "description": "A positive number", "type":"integer", "minimum": 0 } autoparse-0.3.3/spec/spec.opts0000644000175000017500000000003213121477516017603 0ustar balasankarcbalasankarc--colour --format specdoc autoparse-0.3.3/spec/autoparse/0000755000175000017500000000000013121477516017752 5ustar balasankarcbalasankarcautoparse-0.3.3/spec/autoparse/instance_spec.rb0000644000175000017500000012730113121477516023121 0ustar balasankarcbalasankarc# Copyright 2010 Google Inc # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. spec_dir = File.expand_path(File.join(File.dirname(__FILE__), '..')) require 'spec_helper' require 'json' require 'autoparse' require 'addressable/uri' describe AutoParse::Instance, 'with an empty schema' do include JSONMatchers before do @parser = AutoParse::EMPTY_SCHEMA end it 'should have a nil URI' do @parser.uri.should be_nil end it 'should accept all inputs' do instance = @parser.new({ "this" => "doesn't", "really" => ["matter", "at", "all"], "!" => 1.2345 }) instance.should be_valid end it 'should expose values via index methods' do instance = @parser.new({ "this" => "doesn't", "really" => ["matter", "at", "all"], "!" => 1.2345 }) instance["this"].should == "doesn't" instance["really"].should == ["matter", "at", "all"] instance["!"].should == 1.2345 end it 'should be coerceable to a Hash value' do instance = @parser.new({ "this" => "doesn't", "really" => ["matter", "at", "all"], "!" => 1.2345 }) instance.to_hash.should == { "this" => "doesn't", "really" => ["matter", "at", "all"], "!" => 1.2345 } end it 'should convert to a JSON string' do instance = @parser.new({"be" => "brief"}) instance.to_json.should be_json '{"be":"brief"}' end end describe AutoParse::Instance, 'with the geo schema' do include JSONMatchers before do @uri = Addressable::URI.new( :scheme => 'file', :host => '', :path => File.expand_path(File.join(spec_dir, './data/geo.json')) ) @schema_data = JSON.parse(File.open(@uri.path, 'r') { |f| f.read }) @parser = AutoParse.generate(@schema_data, :uri => @uri) end it 'should have the correct URI' do @parser.uri.should === @uri end it 'should accept a valid geographic coordinate input' do instance = @parser.new({ "latitude" => 37.422, "longitude" => -122.084 }) instance.should be_valid end it 'should not accept an invalid geographic coordinate input' do instance = @parser.new({ "latitude" => "not", "longitude" => "valid" }) instance.should_not be_valid end it 'should accept extra fields' do instance = @parser.new({ "latitude" => 37.422, "longitude" => -122.084, "extra" => "bonus!" 
}) instance.should be_valid end it 'should expose values via generated accessors' do instance = @parser.new({ "latitude" => 37.422, "longitude" => -122.084 }) instance.latitude.should == 37.422 instance.longitude.should == -122.084 end it 'should alter output structure via generated mutators' do instance = @parser.new instance.latitude = 37.422 instance.longitude = -122.084 instance.to_hash.should == { "latitude" => 37.422, "longitude" => -122.084 } end it 'should be coerceable to a Hash value' do instance = @parser.new({ "latitude" => 37.422, "longitude" => -122.084 }) instance.to_hash.should == { "latitude" => 37.422, "longitude" => -122.084 } end it 'should convert to a JSON string' do instance = @parser.new({ "latitude" => 37.422, "longitude" => -122.084 }) instance.to_json.should be_json '{"latitude":37.422,"longitude":-122.084}' end end describe AutoParse::Instance, 'with the address schema' do include JSONMatchers before do @uri = Addressable::URI.new( :scheme => 'file', :host => '', :path => File.expand_path(File.join(spec_dir, './data/address.json')) ) @schema_data = JSON.parse(File.open(@uri.path, 'r') { |f| f.read }) @parser = AutoParse.generate(@schema_data, :uri => @uri) end it 'should have the correct URI' do @parser.uri.should === @uri end it 'should accept a valid address input' do instance = @parser.new({ "post-office-box" => "PO Box 3.14159", "street-address" => "1600 Amphitheatre Parkway", "locality" => "Mountain View", "region" => "CA", "postal-code" => "94043", "country-name" => "United States" }) instance.should be_valid end it 'should accept extra fields' do instance = @parser.new({ "post-office-box" => "PO Box 3.14159", "street-address" => "1600 Amphitheatre Parkway", "locality" => "Mountain View", "region" => "CA", "postal-code" => "94043", "country-name" => "United States", "extra" => "bonus!" 
}) instance.should be_valid end it 'should accept a minimally valid address input' do instance = @parser.new({ "locality" => "Mountain View", "region" => "CA", "country-name" => "United States" }) instance.should be_valid end it 'should not accept an address with unmet dependencies' do instance = @parser.new({ "post-office-box" => "PO Box 3.14159", "extended-address" => "Apt 2.71828", "locality" => "Mountain View", "region" => "CA", "country-name" => "United States" }) instance.should_not be_valid end it 'should expose values via generated accessors' do instance = @parser.new({ "post-office-box" => "PO Box 3.14159", "street-address" => "1600 Amphitheatre Parkway", "locality" => "Mountain View", "region" => "CA", "postal-code" => "94043", "country-name" => "United States" }) instance.post_office_box.should == "PO Box 3.14159" instance.street_address.should == "1600 Amphitheatre Parkway" instance.locality.should == "Mountain View" instance.region.should == "CA" instance.postal_code.should == "94043" instance.country_name.should == "United States" end it 'should alter output structure via generated mutators' do instance = @parser.new instance.post_office_box = "PO Box 3.14159" instance.street_address = "1600 Amphitheatre Parkway" instance.locality = "Mountain View" instance.region = "CA" instance.postal_code = "94043" instance.country_name = "United States" instance.to_hash.should == { "post-office-box" => "PO Box 3.14159", "street-address" => "1600 Amphitheatre Parkway", "locality" => "Mountain View", "region" => "CA", "postal-code" => "94043", "country-name" => "United States" } end it 'should be coerceable to a Hash value' do instance = @parser.new({ "post-office-box" => "PO Box 3.14159", "street-address" => "1600 Amphitheatre Parkway", "locality" => "Mountain View", "region" => "CA", "postal-code" => "94043", "country-name" => "United States" }) instance.to_hash.should == { "post-office-box" => "PO Box 3.14159", "street-address" => "1600 Amphitheatre Parkway", "locality" => "Mountain View", "region" => "CA", "postal-code" => "94043", "country-name" => "United States" } end end describe AutoParse::Instance, 'with the person schema' do include JSONMatchers before do @uri = Addressable::URI.new( :scheme => 'file', :host => '', :path => File.expand_path(File.join(spec_dir, './data/person.json')) ) @schema_data = JSON.parse(File.open(@uri.path, 'r') { |f| f.read }) @parser = AutoParse.generate(@schema_data, :uri => @uri) end it 'should have the correct URI' do @parser.uri.should === @uri end it 'should accept a valid person input' do instance = @parser.new({ "name" => "Bob Aman", "age" => 29 }) instance.should be_valid end it 'should accept extra fields' do instance = @parser.new({ "name" => "Bob Aman", "age" => 29, "extra" => "bonus!" 
}) instance.should be_valid end it 'should validate a person whose age is equal to the maximum' do instance = @parser.new({ "name" => "Aged Outlier", "age" => 125 }) instance.should be_valid end it 'should validate a young person' do instance = @parser.new({ "name" => "Joe Teenager", "age" => 15 }) instance.should be_valid end it 'should not accept an invalid person input' do instance = @parser.new({ "name" => "Methuselah", "age" => 969 }) instance.should_not be_valid end it 'should not accept ages which are not integers' do instance = @parser.new({ "name" => "Bob Aman", "age" => 29.7 }) instance.should_not be_valid end it 'should expose values via generated accessors' do instance = @parser.new({ "name" => "Bob Aman", "age" => 29 }) instance.name.should == "Bob Aman" instance.age.should == 29 end it 'should alter output structure via generated mutators' do instance = @parser.new instance.name = "Bob Aman" instance.age = 29 instance.to_hash.should == { "name" => "Bob Aman", "age" => 29 } end it 'should be coerceable to a Hash value' do instance = @parser.new({ "name" => "Bob Aman", "age" => 29 }) instance.to_hash.should == { "name" => "Bob Aman", "age" => 29 } end it 'should convert to a JSON string' do instance = @parser.new({ "name" => "Bob Aman", "age" => 29 }) instance.to_json.should be_json '{"name":"Bob Aman","age":29}' end end describe AutoParse::Instance, 'with the adult schema' do include JSONMatchers before do @person_uri = Addressable::URI.new( :scheme => 'file', :host => '', :path => File.expand_path(File.join(spec_dir, './data/person.json')) ) @person_schema_data = JSON.parse(File.open(@person_uri.path, 'r') { |f| f.read }) @person_parser = AutoParse.generate( @person_schema_data, :uri => @person_uri ) @adult_uri = Addressable::URI.new( :scheme => 'file', :host => '', :path => File.expand_path(File.join(spec_dir, './data/adult.json')) ) @adult_schema_data = JSON.parse(File.open(@adult_uri.path, 'r') { |f| f.read }) @adult_parser = AutoParse.generate(@adult_schema_data, :uri => @adult_uri) end it 'should have the correct URI' do @person_parser.uri.should === @person_uri @adult_parser.uri.should === @adult_uri end it 'should accept a valid person input' do instance = @adult_parser.new({ "name" => "Bob Aman", "age" => 29 }) instance.should be_valid end it 'should accept extra fields' do instance = @adult_parser.new({ "name" => "Bob Aman", "age" => 29, "extra" => "bonus!" 
}) instance.should be_valid end it 'should validate a person whose age is equal to the maximum' do instance = @adult_parser.new({ "name" => "Aged Outlier", "age" => 125 }) instance.should be_valid end it 'should not validate a young person' do instance = @adult_parser.new({ "name" => "Joe Teenager", "age" => 15 }) instance.should_not be_valid end it 'should not accept an invalid person input' do instance = @adult_parser.new({ "name" => "Methuselah", "age" => 969 }) instance.should_not be_valid end it 'should not accept ages which are not integers' do instance = @adult_parser.new({ "name" => "Bob Aman", "age" => 29.7 }) instance.should_not be_valid end it 'should expose values via generated accessors' do instance = @adult_parser.new({ "name" => "Bob Aman", "age" => 29 }) instance.name.should == "Bob Aman" instance.age.should == 29 end it 'should alter output structure via generated mutators' do instance = @adult_parser.new instance.name = "Bob Aman" instance.age = 29 instance.to_hash.should == { "name" => "Bob Aman", "age" => 29 } end it 'should be coerceable to a Hash value' do instance = @adult_parser.new({ "name" => "Bob Aman", "age" => 29 }) instance.to_hash.should == { "name" => "Bob Aman", "age" => 29 } end it 'should convert to a JSON string' do instance = @adult_parser.new({ "name" => "Bob Aman", "age" => 29 }) instance.to_json.should be_json '{"name":"Bob Aman","age":29}' end end describe AutoParse::Instance, 'with the user list schema' do include JSONMatchers before do @person_uri = Addressable::URI.new( :scheme => 'file', :host => '', :path => File.expand_path(File.join(spec_dir, './data/person.json')) ) @person_schema_data = JSON.parse(File.open(@person_uri.path, 'r') { |f| f.read }) @person_parser = AutoParse.generate( @person_schema_data, :uri => @person_uri ) @user_list_uri = Addressable::URI.new( :scheme => 'file', :host => '', :path => File.expand_path(File.join(spec_dir, './data/user-list.json')) ) @user_list_schema_data = JSON.parse(File.open(@user_list_uri.path, 'r') { |f| f.read }) @user_list_parser = AutoParse.generate( @user_list_schema_data, :uri => @user_list_uri ) end it 'should have the correct URI' do @user_list_parser.uri.should === @user_list_uri end it 'should accept a valid person input' do instance = @user_list_parser.new({ "users" => { "bobaman@google.com" => { "name" => "Bob Aman", "age" => 29 } } }) instance.should be_valid end it 'should accept extra fields' do instance = @user_list_parser.new({ "users" => { "bobaman@google.com" => { "name" => "Bob Aman", "age" => 29, "extra" => "bonus!" } }, "extra" => "bonus!" 
}) instance.should be_valid end it 'should not accept additional properties that do not validate' do instance = @user_list_parser.new({ "users" => { "bobaman@google.com" => { "name" => "Bob Aman", "age" => 29 }, "oldguy@example.com" => { "name" => "Methuselah", "age" => 969 } } }) instance.should_not be_valid end it 'should not accept additional properties that do not validate' do instance = @user_list_parser.new({ "users" => { "bobaman@google.com" => { "name" => "Bob Aman", "age" => 29.7 } } }) instance.should_not be_valid end it 'should expose values via generated accessors' do instance = @user_list_parser.new({ "users" => { "bobaman@google.com" => { "name" => "Bob Aman", "age" => 29 } } }) instance.users['bobaman@google.com'].name.should == "Bob Aman" instance.users['bobaman@google.com'].age.should == 29 end it 'should permit access to raw values' do instance = @user_list_parser.new({ "users" => { "bobaman@google.com" => { "name" => "Bob Aman", "age" => 29 } } }) instance.users['bobaman@google.com', true].should == { "name" => "Bob Aman", "age" => 29 } end it 'should alter output structure via generated mutators' do instance = @user_list_parser.new instance.users = {} instance.users["bobaman@google.com"] = @person_parser.new instance.users["bobaman@google.com"].name = "Bob Aman" instance.users["bobaman@google.com"].age = 29 instance.to_hash.should == { "users" => { "bobaman@google.com" => { "name" => "Bob Aman", "age" => 29 } } } end it 'should allow raw assignment' do instance = @user_list_parser.new instance.users = {} instance.users["bobaman@google.com", true] = { "name" => "Bob Aman", "age" => 29 } instance.to_hash.should == { "users" => { "bobaman@google.com" => { "name" => "Bob Aman", "age" => 29 } } } end it 'should be coerceable to a Hash value' do instance = @user_list_parser.new({ "users" => { "bobaman@google.com" => { "name" => "Bob Aman", "age" => 29 } } }) instance.to_hash.should == { "users" => { "bobaman@google.com" => { "name" => "Bob Aman", "age" => 29 } } } end it 'should convert to a JSON string' do instance = @user_list_parser.new({ "users" => { "bobaman@google.com" => { "name" => "Bob Aman", "age" => 29 } } }) instance.to_json.should be_json ( '{"users":{"bobaman@google.com":{"name":"Bob Aman","age":29}}}' ) end end describe AutoParse::Instance, 'with the positive schema' do include JSONMatchers before do @positive_uri = Addressable::URI.new( :scheme => 'file', :host => '', :path => File.expand_path(File.join(spec_dir, './data/positive.json')) ) @positive_schema_data = JSON.parse(File.open(@positive_uri.path, 'r') { |f| f.read }) @positive_parser = AutoParse.generate( @positive_schema_data, :uri => @positive_uri ) end it 'should have the correct URI' do @positive_parser.uri.should === @positive_uri end it 'should not allow instantiation' do (lambda do instance = @positive_parser.new(-1000) end).should raise_error(TypeError) end it 'should not allow instantiation, even for a valid positive integer' do (lambda do instance = @positive_parser.new(1000) end).should raise_error(TypeError) end end describe AutoParse::Instance, 'with the account schema' do include JSONMatchers before do @positive_uri = Addressable::URI.new( :scheme => 'file', :host => '', :path => File.expand_path(File.join(spec_dir, './data/positive.json')) ) @positive_schema_data = JSON.parse(File.open(@positive_uri.path, 'r') { |f| f.read }) @positive_parser = AutoParse.generate( @positive_schema_data, :uri => @positive_uri ) @account_uri = Addressable::URI.new( :scheme => 'file', :host => '', :path => 
File.expand_path(File.join(spec_dir, './data/account.json')) ) @account_schema_data = JSON.parse(File.open(@account_uri.path, 'r') { |f| f.read }) @account_parser = AutoParse.generate( @account_schema_data, :uri => @account_uri ) end it 'should have the correct URI' do @positive_parser.uri.should === @positive_uri @account_parser.uri.should === @account_uri end it 'should accept a valid account input' do instance = @account_parser.new({ "accountNumber" => "12345", "balance" => 1000 }) instance.should be_valid end it 'should accept extra fields' do instance = @account_parser.new({ "accountNumber" => "12345", "balance" => 1000, "extra" => "bonus!" }) instance.should be_valid end it 'should validate an account with a zero balance' do instance = @account_parser.new({ "accountNumber" => "12345", "balance" => 0 }) instance.should be_valid end it 'should not validate a negative account balance' do instance = @account_parser.new({ "accountNumber" => "12345", "balance" => -1000 }) instance.should_not be_valid end it 'should not accept an invalid account input' do instance = @account_parser.new({ "accountNumber" => "12345", "balance" => "bogus" }) instance.should_not be_valid end it 'should expose values via generated accessors' do instance = @account_parser.new({ "accountNumber" => "12345", "balance" => 1000 }) instance.account_number.should == "12345" instance.balance.should == 1000 end it 'should alter output structure via generated mutators' do instance = @account_parser.new instance.account_number = "12345" instance.balance = 1000 instance.to_hash.should == { "accountNumber" => "12345", "balance" => 1000 } end it 'should be coerceable to a Hash value' do instance = @account_parser.new({ "accountNumber" => "12345", "balance" => 1000 }) instance.to_hash.should == { "accountNumber" => "12345", "balance" => 1000 } end it 'should convert to a JSON string' do instance = @account_parser.new({ "accountNumber" => "12345", "balance" => 1000 }) instance.to_json.should be_json '{"accountNumber":"12345","balance":1000}' end end describe AutoParse::Instance, 'with the card schema' do include JSONMatchers before do @address_uri = Addressable::URI.new( :scheme => 'file', :host => '', :path => File.expand_path(File.join(spec_dir, './data/address.json')) ) @address_schema_data = JSON.parse(File.open(@address_uri.path, 'r') { |f| f.read }) @address_parser = AutoParse.generate( @address_schema_data, :uri => @address_uri ) @geo_uri = Addressable::URI.new( :scheme => 'file', :host => '', :path => File.expand_path(File.join(spec_dir, './data/geo.json')) ) @geo_schema_data = JSON.parse(File.open(@geo_uri.path, 'r') { |f| f.read }) @geo_parser = AutoParse.generate(@geo_schema_data, :uri => @geo_uri) @card_uri = Addressable::URI.new( :scheme => 'file', :host => '', :path => File.expand_path(File.join(spec_dir, './data/card.json')) ) @card_schema_data = JSON.parse(File.open(@card_uri.path, 'r') { |f| f.read }) @card_parser = AutoParse.generate(@card_schema_data, :uri => @card_uri) end it 'should have the correct URI' do @address_parser.uri.should === @address_uri @geo_parser.uri.should === @geo_uri @card_parser.uri.should === @card_uri end it 'should allow nested objects to be accessed in raw' do instance = @card_parser.new({ "givenName" => "Robert", "familyName" => "Aman", "org" => { "organizationName" => "Google, Inc.", "organizationUnit" => "Developer Relations" } }) instance['org', true].should == { "organizationName" => "Google, Inc.", "organizationUnit" => "Developer Relations" } end it 'should have the correct URI 
for anonymous nested objects' do instance = @card_parser.new({ "givenName" => "Robert", "familyName" => "Aman", "org" => { "organizationName" => "Google, Inc.", "organizationUnit" => "Developer Relations" } }) # Anonymous schemas inherit the parent schema's URI. instance.org.class.uri.should === @card_uri end it 'should have the correct URI for external nested objects' do instance = @card_parser.new({ "givenName" => "Robert", "familyName" => "Aman", "adr" => { "locality" => "Lavington", "region" => "Nairobi", "country-name" => "Kenya" } }) # External schemas have their own URI. instance.adr.class.uri.should === @address_uri end it 'should accept a valid card input' do instance = @card_parser.new({ "givenName" => "Robert", "familyName" => "Aman" }) instance.should be_valid end it 'should accept extra fields' do instance = @card_parser.new({ "givenName" => "Robert", "familyName" => "Aman", "extra" => "bonus!" }) instance.should be_valid end it 'should accept a more complete card input' do instance = @card_parser.new({ "givenName" => "Robert", "familyName" => "Aman", "additionalName" => ["Danger"], "nickname" => "Bob", "url" => "https://plus.google.com/116452824309856782163", "email" => { "type" => "work", "value" => "bobaman@google.com" }, "tel" => { "type" => "fake", "value" => "867-5309" }, "tz" => "+03:00", "logo" => "https://secure.gravatar.com/avatar/56ee28134dd0776825445e3551979b14", "org" => { "organizationName" => "Google, Inc.", "organizationUnit" => "Developer Relations" } }) instance.should be_valid end it 'should accept a card input with an externally referenced schema' do instance = @card_parser.new({ "givenName" => "Robert", "familyName" => "Aman", "adr" => { "locality" => "Lavington", "region" => "Nairobi", "country-name" => "Kenya" }, "geo" => { "latitude" => -1.290034, "longitude" => 36.771584 } }) instance.adr.should be_valid instance.geo.should be_valid instance.should be_valid end it 'should not validate a card input with invalid array values' do instance = @card_parser.new({ "givenName" => "Robert", "familyName" => "Aman", "additionalName" => [3.14159] }) instance.should_not be_valid end it 'should not validate a card input when external schema is invalid' do instance = @card_parser.new({ "givenName" => "Robert", "familyName" => "Aman", "adr" => { "extended-address" => "Apt 2.71828", "locality" => "Lavington", "region" => "Nairobi", "country-name" => "Kenya" }, "geo" => { "latitude" => -1.290034, "longitude" => 36.771584 } }) instance.adr.should_not be_valid instance.should_not be_valid instance = @card_parser.new({ "givenName" => "Robert", "familyName" => "Aman", "adr" => { "locality" => "Lavington", "region" => "Nairobi", "country-name" => "Kenya" }, "geo" => { "latitude" => "not", "longitude" => "valid" } }) instance.geo.should_not be_valid instance.should_not be_valid end it 'should expose values via generated accessors' do instance = @card_parser.new({ "givenName" => "Robert", "familyName" => "Aman", "additionalName" => ["Danger"], "nickname" => "Bob", "url" => "https://plus.google.com/116452824309856782163", "email" => { "type" => "work", "value" => "bobaman@google.com" }, "tel" => { "type" => "fake", "value" => "867-5309" }, "tz" => "+03:00", "logo" => "https://secure.gravatar.com/avatar/56ee28134dd0776825445e3551979b14", "org" => { "organizationName" => "Google, Inc.", "organizationUnit" => "Developer Relations" } }) instance.given_name.should == "Robert" instance.family_name.should == "Aman" instance.additional_name.should == ["Danger"] instance.nickname.should 
== "Bob" instance.url.should be_kind_of(Addressable::URI) instance.url.should === "https://plus.google.com/116452824309856782163" instance.email.type.should == "work" instance.email.value.should == "bobaman@google.com" instance.tel.type.should == "fake" instance.tel.value.should == "867-5309" instance.tz.should == "+03:00" instance.logo.should == "https://secure.gravatar.com/avatar/56ee28134dd0776825445e3551979b14" instance.org.organization_name.should == "Google, Inc." instance.org.organization_unit.should == "Developer Relations" end it 'should return nil for undefined object values' do instance = @card_parser.new instance.email.should be_nil instance.tel.should be_nil instance.org.should be_nil instance.adr.should be_nil instance.geo.should be_nil end it 'should alter output structure via generated mutators' do instance = @card_parser.new instance.given_name = "Robert" instance.family_name = "Aman" instance.additional_name = ["Danger"] instance.nickname = "Bob" instance.url = "https://plus.google.com/116452824309856782163" instance.email = {} instance.email.type = "work" instance.email.value = "bobaman@google.com" instance.tel = {} instance.tel.type = "fake" instance.tel.value = "867-5309" instance.tz = "+03:00" instance.logo = "https://secure.gravatar.com/avatar/56ee28134dd0776825445e3551979b14" instance.org = {} instance.org.organization_name = "Google, Inc." instance.org.organization_unit = "Developer Relations" instance.to_hash.should == { "givenName" => "Robert", "familyName" => "Aman", "additionalName" => ["Danger"], "nickname" => "Bob", "url" => "https://plus.google.com/116452824309856782163", "email" => { "type" => "work", "value" => "bobaman@google.com" }, "tel" => { "type" => "fake", "value" => "867-5309" }, "tz" => "+03:00", "logo" => "https://secure.gravatar.com/avatar/56ee28134dd0776825445e3551979b14", "org" => { "organizationName" => "Google, Inc.", "organizationUnit" => "Developer Relations" } } end it 'should be coerceable to a Hash value' do instance = @card_parser.new({ "givenName" => "Robert", "familyName" => "Aman", "additionalName" => ["Danger"], "nickname" => "Bob", "url" => "https://plus.google.com/116452824309856782163", "email" => { "type" => "work", "value" => "bobaman@google.com" }, "tel" => { "type" => "fake", "value" => "867-5309" }, "tz" => "+03:00", "logo" => "https://secure.gravatar.com/avatar/56ee28134dd0776825445e3551979b14", "org" => { "organizationName" => "Google, Inc.", "organizationUnit" => "Developer Relations" } }) instance.to_hash.should == { "givenName" => "Robert", "familyName" => "Aman", "additionalName" => ["Danger"], "nickname" => "Bob", "url" => "https://plus.google.com/116452824309856782163", "email" => { "type" => "work", "value" => "bobaman@google.com" }, "tel" => { "type" => "fake", "value" => "867-5309" }, "tz" => "+03:00", "logo" => "https://secure.gravatar.com/avatar/56ee28134dd0776825445e3551979b14", "org" => { "organizationName" => "Google, Inc.", "organizationUnit" => "Developer Relations" } } end it 'should convert to a JSON string' do instance = @card_parser.new({ "givenName" => "Robert", "familyName" => "Aman" }) instance.to_json.should be_json '{"givenName":"Robert","familyName":"Aman"}' end end describe AutoParse::Instance, 'with the calendar schema' do include JSONMatchers before do @geo_uri = Addressable::URI.new( :scheme => 'file', :host => '', :path => File.expand_path(File.join(spec_dir, './data/geo.json')) ) @geo_schema_data = JSON.parse(File.open(@geo_uri.path, 'r') { |f| f.read }) @geo_parser = 
AutoParse.generate(@geo_schema_data, :uri => @geo_uri) @calendar_uri = Addressable::URI.new( :scheme => 'file', :host => '', :path => File.expand_path(File.join(spec_dir, './data/calendar.json')) ) @calendar_schema_data = JSON.parse(File.open(@calendar_uri.path, 'r') { |f| f.read }) @calendar_parser = AutoParse.generate( @calendar_schema_data, :uri => @calendar_uri ) end it 'should have the correct URI' do @geo_parser.uri.should === @geo_uri @calendar_parser.uri.should === @calendar_uri end it 'should accept a valid calendar input' do instance = @calendar_parser.new({ "dtstart" => "1592-03-14T00:00:00Z", "dtend" => "1592-03-14T23:59:59Z", "summary" => "Pi Day", "location" => "Googleplex", "url" => "http://www.piday.org/", "rrule" => "FREQ=YEARLY" }) instance.should be_valid end it 'should accept extra fields' do instance = @calendar_parser.new({ "dtstart" => "1592-03-14T00:00:00Z", "dtend" => "1592-03-14T23:59:59Z", "summary" => "Pi Day", "location" => "Googleplex", "url" => "http://www.piday.org/", "rrule" => "FREQ=YEARLY", "extra" => "bonus!" }) instance.should be_valid end it 'should accept a calendar input with an externally referenced schema' do instance = @calendar_parser.new({ "dtstart" => "1592-03-14T00:00:00Z", "dtend" => "1592-03-14T23:59:59Z", "summary" => "Pi Day", "location" => "Googleplex", "url" => "http://www.piday.org/", "rrule" => "FREQ=YEARLY", "geo" => { "latitude" => 37.422, "longitude" => -122.084 } }) instance.geo.should be_valid instance.should be_valid end it 'should not validate a calendar input when external schema is invalid' do instance = @calendar_parser.new({ "dtstart" => "1592-03-14T00:00:00Z", "dtend" => "1592-03-14T23:59:59Z", "summary" => "Pi Day", "location" => "Googleplex", "url" => "http://www.piday.org/", "rrule" => "FREQ=YEARLY", "geo" => { "latitude" => "not", "longitude" => "valid" } }) instance.geo.should_not be_valid instance.should_not be_valid end it 'should expose values via generated accessors' do instance = @calendar_parser.new({ "dtstart" => "1592-03-14T00:00:00Z", "dtend" => "1592-03-14T23:59:59Z", "summary" => "Pi Day", "location" => "Googleplex", "url" => "http://www.piday.org/", "rrule" => "FREQ=YEARLY", "geo" => { "latitude" => 37.422, "longitude" => -122.084 } }) instance.dtstart.should == Time.utc(1592, 3, 14) instance.dtend.should == Time.utc(1592, 3, 14, 23, 59, 59) instance.summary.should == "Pi Day" instance.location.should == "Googleplex" instance.url.should == Addressable::URI.parse("http://www.piday.org/") instance.rrule.should == "FREQ=YEARLY" instance.geo.latitude.should == 37.422 instance.geo.longitude.should == -122.084 end it 'should return nil for undefined object values' do instance = @calendar_parser.new instance.geo.should be_nil end it 'should alter output structure via generated mutators' do instance = @calendar_parser.new instance.dtstart = Time.utc(1592, 3, 14) instance.dtend = Time.utc(1592, 3, 14, 23, 59, 59) instance.summary = "Pi Day" instance.location = "Googleplex" instance.url = Addressable::URI.parse("http://www.piday.org/") instance.rrule = "FREQ=YEARLY" instance.geo = @geo_parser.new instance.geo.latitude = 37.422 instance.geo.longitude = -122.084 instance.to_hash.should == { "dtstart" => "1592-03-14T00:00:00Z", "dtend" => "1592-03-14T23:59:59Z", "summary" => "Pi Day", "location" => "Googleplex", "url" => "http://www.piday.org/", "rrule" => "FREQ=YEARLY", "geo" => { "latitude" => 37.422, "longitude" => -122.084 } } end it 'should be coerceable to a Hash value' do instance = @calendar_parser.new({ 
"dtstart" => "1592-03-14T00:00:00Z", "dtend" => "1592-03-14T23:59:59Z", "summary" => "Pi Day", "location" => "Googleplex", "url" => "http://www.piday.org/", "rrule" => "FREQ=YEARLY", "geo" => { "latitude" => 37.422, "longitude" => -122.084 } }) instance.to_hash.should == { "dtstart" => "1592-03-14T00:00:00Z", "dtend" => "1592-03-14T23:59:59Z", "summary" => "Pi Day", "location" => "Googleplex", "url" => "http://www.piday.org/", "rrule" => "FREQ=YEARLY", "geo" => { "latitude" => 37.422, "longitude" => -122.084 } } end it 'should convert to a JSON string' do instance = @calendar_parser.new({ "dtend" => "1592-03-14T23:59:59Z", "dtstart" => "1592-03-14T00:00:00Z", "summary" => "Pi Day" }) instance.to_json.should be_json ( '{"dtend":"1592-03-14T23:59:59Z",' + '"dtstart":"1592-03-14T00:00:00Z",'+ '"summary":"Pi Day"}' ) end end describe AutoParse::Instance, 'with the node schema' do include JSONMatchers before do @uri = Addressable::URI.new( :scheme => 'file', :host => '', :path => File.expand_path(File.join(spec_dir, './data/node.json')) ) @schema_data = JSON.parse(File.open(@uri.path, 'r') { |f| f.read }) @parser = AutoParse.generate(@schema_data, :uri => @uri) end it 'should have the correct URI' do @parser.uri.should === @uri end it 'should accept a valid node input' do instance = @parser.new({ "value" => 42, "left" => nil, "right" => nil }) instance.should be_valid end it 'should accept extra fields' do instance = @parser.new({ "value" => "1", "left" => nil, "right" => nil, "extra" => "bonus!" }) instance.should be_valid end it 'should not accept an invalid node input' do instance = @parser.new({ "value" => 42, "left" => 3.14, "right" => 2.71 }) instance.should_not be_valid end it 'should accept a valid recursive node input' do instance = @parser.new({ "value" => 42, "left" => { "value" => 3.14, "left" => nil, "right" => nil }, "right" => { "value" => 2.71, "left" => nil, "right" => nil } }) instance.should be_valid end it 'should expose values via generated accessors' do instance = @parser.new({ "value" => 42, "left" => { "value" => 3.14, "left" => nil, "right" => nil }, "right" => { "value" => 2.71, "left" => nil, "right" => nil } }) instance.value.should == 42 instance.left.value.should == 3.14 instance.right.value.should == 2.71 instance.left.left.should == nil instance.right.left.should == nil instance.left.right.should == nil instance.right.right.should == nil end it 'should return nil for undefined object values' do instance = @parser.new instance.left.should be_nil instance.right.should be_nil end it 'should alter output structure via generated mutators' do instance = @parser.new instance.value = 42 instance.left = @parser.new instance.left.value = 3.14 instance.left.left = nil instance.left.right = nil instance.right = @parser.new instance.right.value = 2.71 instance.right.left = nil instance.right.right = nil instance.to_hash.should == { "value" => 42, "left" => { "value" => 3.14, "left" => nil, "right" => nil }, "right" => { "value" => 2.71, "left" => nil, "right" => nil } } end it 'should be coerceable to a Hash value' do instance = @parser.new({ "value" => 42, "left" => nil, "right" => nil }) instance.to_hash.should == { "value" => 42, "left" => nil, "right" => nil } end it 'should convert to a JSON string' do instance = @parser.new({ "left" => nil, "value" => 42, "right" => nil }) instance.to_json.should be_json '{"left":null,"value":42,"right":null}' end end describe AutoParse::Instance, 'with the booleans schema' do include JSONMatchers before do @uri = Addressable::URI.new( 
:scheme => 'file', :host => '', :path => File.expand_path(File.join(spec_dir, './data/booleans.json')) ) @schema_data = JSON.parse(File.open(@uri.path, 'r') { |f| f.read }) @parser = AutoParse.generate(@schema_data, :uri => @uri) end it 'should have the correct URI' do @parser.uri.should === @uri end it 'should accept a valid booleans input' do instance = @parser.new({ "guaranteed" => false, "truthy" => true, "accurate" => false }) instance.should be_valid end it 'should expose values via generated accessors' do instance = @parser.new({ "guaranteed" => false, "truthy" => true, "accurate" => false }) instance.truthy.should == true instance.untrue.should == false instance.accurate.should == false instance.maybe.should == nil instance.guaranteed.should == false end it 'should alter output structure via generated mutators' do instance = @parser.new instance.truthy = true instance.untrue = false instance.accurate = false instance.guaranteed = false instance.to_hash.should == { "truthy" => true, "untrue" => false, "accurate" => false, "guaranteed" => false } end it 'should be coerceable to a Hash value' do instance = @parser.new({ "guaranteed" => false, "truthy" => true, "accurate" => false }) instance.to_hash.should == { "guaranteed" => false, "truthy" => true, "accurate" => false } end it 'should convert to a JSON string' do instance = @parser.new({ "left" => nil, "value" => 42, "right" => nil }) instance.to_json.should be_json '{"left":null,"value":42,"right":null}' end end describe AutoParse::Instance, 'with the file schema' do def load_schema(file, uri) json = JSON.parse(File.read(file)) return AutoParse.generate(json, :uri => Addressable::URI.parse(uri)) end before do @data = { "kind" => "drive#fileList", "items" => [ { "kind" => "drive#file", "id" => "0Bz2X2-r-Ou9fYTJFLVFYZENzMjA", "editable" => "false", "parents" => [ { "kind" => "drive#parentReference", "id" => "0AD2X2-r-Ou9fUk9PVA", "isRoot" => "true" } ] } ] } @filelist_parser = load_schema('spec/data/filelist.json', 'https://www.googleapis.com/drive/v2/#FileList') @file_parser = load_schema('spec/data/file.json', 'https://www.googleapis.com/drive/v2/#File') @parentref_parser = load_schema('spec/data/parentref.json', 'https://www.googleapis.com/drive/v2/#ParentReference') end it 'should not redefine parent schemas' do file = @filelist_parser.new(@data) file.should be_an_instance_of @filelist_parser file.items.first.should be_an_instance_of @file_parser file.items.first.parents.first.should be_an_instance_of @parentref_parser file = @filelist_parser.new(@data) file.should be_an_instance_of @filelist_parser file.items.first.should be_an_instance_of @file_parser file.items.first.parents.first.should be_an_instance_of @parentref_parser end it 'should not redefine the parent when accessing anonymous children' do file_json = { "kind" => "drive#file", "id" => "0Bz2X2-r-Ou9fYTJFLVFYZENzMjA", "parents" => [ { "kind" => "drive#parentReference", "id" => "0AD2X2-r-Ou9fUk9PVA", }] } list_json = { "kind" => "drive#fileList", "items" => [ { "kind" => "drive#file", "id" => "0Bz2X2-r-Ou9fYTJFLVFYZENzMjA", "parents" => [ { "kind" => "drive#parentReference", "id" => "0AD2X2-r-Ou9fUk9PVA", } ], } ] } file = @file_parser.new(file_json) file.should be_an_instance_of @file_parser file.parents.first.should be_an_instance_of @parentref_parser # Regression check for bug where accessing the parents property would redefine # file. This resulted in an inability to parse that type later on, such as when # used in the items array. 
list = @filelist_parser.new(list_json) list.should be_an_instance_of @filelist_parser list.items.first.should be_an_instance_of @file_parser list.items.first.parents.first.should be_an_instance_of @parentref_parser end it 'should handle booleans correctly' do file = @filelist_parser.new(@data) file.should be_an_instance_of @filelist_parser file.items.first.should be_an_instance_of @file_parser file.items.first.editable.should == false file.items.first.parents.first.should be_an_instance_of @parentref_parser file.items.first.parents.first.is_root.should == true end end autoparse-0.3.3/spec/spec_helper.rb0000644000175000017500000000121013121477516020557 0ustar balasankarcbalasankarcspec_dir = File.expand_path(File.dirname(__FILE__)) lib_dir = File.expand_path(File.join(spec_dir, '../lib')) $:.unshift(lib_dir) $:.uniq! require 'autoparse' require 'json' module JSONMatchers class EqualsJson def initialize(expected) @expected = JSON.parse(expected) end def matches?(target) @target = JSON.parse(target) @target.eql?(@expected) end def failure_message "expected #{@target.inspect} to be #{@expected}" end def negative_failure_message "expected #{@target.inspect} not to be #{@expected}" end end def be_json(expected) EqualsJson.new(expected) end endautoparse-0.3.3/Rakefile0000644000175000017500000000312413121477516016462 0ustar balasankarcbalasankarclib_dir = File.expand_path(File.join(File.dirname(__FILE__), 'lib')) $:.unshift(lib_dir) $:.uniq! require 'rubygems' require 'rake' gem 'rspec', '~> 1.2.9' begin require 'spec/rake/spectask' rescue LoadError STDERR.puts "Please install rspec:" STDERR.puts "sudo gem install rspec" exit(1) end require File.join(File.dirname(__FILE__), 'lib/autoparse', 'version') PKG_DISPLAY_NAME = 'AutoParse' PKG_NAME = PKG_DISPLAY_NAME.downcase PKG_VERSION = AutoParse::VERSION::STRING PKG_FILE_NAME = "#{PKG_NAME}-#{PKG_VERSION}" RELEASE_NAME = "REL #{PKG_VERSION}" RUBY_FORGE_PROJECT = PKG_NAME RUBY_FORGE_USER = 'sporkmonger' RUBY_FORGE_PATH = "/var/www/gforge-projects/#{RUBY_FORGE_PROJECT}" RUBY_FORGE_URL = "http://#{RUBY_FORGE_PROJECT}.rubyforge.org/" PKG_AUTHOR = 'Bob Aman' PKG_AUTHOR_EMAIL = 'bobaman@google.com' PKG_HOMEPAGE = RUBY_FORGE_URL PKG_SUMMARY = 'A parsing system based on JSON Schema.' PKG_DESCRIPTION = <<-TEXT An implementation of the JSON Schema specification. Provides automatic parsing for any given JSON Schema. TEXT PKG_FILES = FileList[ 'lib/**/*', 'spec/**/*', 'vendor/**/*', 'tasks/**/*', 'website/**/*', '[A-Z]*', 'Rakefile' ].exclude(/database\.yml/).exclude(/[_\.]git$/) RCOV_ENABLED = (RUBY_PLATFORM != 'java' && RUBY_VERSION =~ /^1\.8/) if RCOV_ENABLED task :default => 'spec:verify' else task :default => 'spec' end WINDOWS = (RUBY_PLATFORM =~ /mswin|win32|mingw|bccwin|cygwin/) rescue false SUDO = WINDOWS ? '' : ('sudo' unless ENV['SUDOLESS']) Dir['tasks/**/*.rake'].each { |rake| load rake } autoparse-0.3.3/autoparse.gemspec0000644000175000017500000000660313121477516020372 0ustar balasankarcbalasankarc######################################################### # This file has been automatically generated by gem2tgz # ######################################################### # -*- encoding: utf-8 -*- # stub: autoparse 0.3.3 ruby lib Gem::Specification.new do |s| s.name = "autoparse".freeze s.version = "0.3.3" s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? 
:required_rubygems_version=
  s.require_paths = ["lib".freeze]
  s.authors = ["Bob Aman".freeze]
  s.date = "2013-03-26"
  s.description = "An implementation of the JSON Schema specification. Provides automatic parsing\nfor any given JSON Schema.\n".freeze
  s.email = "bobaman@google.com".freeze
  s.extra_rdoc_files = ["README.md".freeze]
  s.files = ["CHANGELOG.md".freeze, "Gemfile".freeze, "LICENSE".freeze, "README.md".freeze, "Rakefile".freeze, "lib/autoparse.rb".freeze, "lib/autoparse/inflection.rb".freeze, "lib/autoparse/instance.rb".freeze, "lib/autoparse/version.rb".freeze, "lib/compat/multi_json.rb".freeze, "spec/autoparse/instance_spec.rb".freeze, "spec/data/account.json".freeze, "spec/data/address.json".freeze, "spec/data/adult.json".freeze, "spec/data/booleans.json".freeze, "spec/data/calendar.json".freeze, "spec/data/card.json".freeze, "spec/data/file.json".freeze, "spec/data/filelist.json".freeze, "spec/data/geo.json".freeze, "spec/data/hyper-schema.json".freeze, "spec/data/interfaces.json".freeze, "spec/data/json-ref.json".freeze, "spec/data/links.json".freeze, "spec/data/node.json".freeze, "spec/data/parentref.json".freeze, "spec/data/person.json".freeze, "spec/data/positive.json".freeze, "spec/data/schema.json".freeze, "spec/data/user-list.json".freeze, "spec/spec.opts".freeze, "spec/spec_helper.rb".freeze, "tasks/clobber.rake".freeze, "tasks/gem.rake".freeze, "tasks/git.rake".freeze, "tasks/metrics.rake".freeze, "tasks/rdoc.rake".freeze, "tasks/spec.rake".freeze, "tasks/yard.rake".freeze]
  s.homepage = "http://autoparse.rubyforge.org/".freeze
  s.rdoc_options = ["--main".freeze, "README.md".freeze]
  s.rubyforge_project = "autoparse".freeze
  s.rubygems_version = "2.5.2".freeze
  s.summary = "A parsing system based on JSON Schema.".freeze

  if s.respond_to? :specification_version then
    s.specification_version = 3

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<addressable>.freeze, [">= 2.3.1"])
      s.add_runtime_dependency(%q<extlib>.freeze, [">= 0.9.15"])
      s.add_development_dependency(%q<launchy>.freeze, [">= 2.1.1"])
      s.add_runtime_dependency(%q<multi_json>.freeze, [">= 1.0.0"])
      s.add_development_dependency(%q<rake>.freeze, [">= 0.9.0"])
      s.add_development_dependency(%q<rspec>.freeze, [">= 2.11.0"])
    else
      s.add_dependency(%q<addressable>.freeze, [">= 2.3.1"])
      s.add_dependency(%q<extlib>.freeze, [">= 0.9.15"])
      s.add_dependency(%q<launchy>.freeze, [">= 2.1.1"])
      s.add_dependency(%q<multi_json>.freeze, [">= 1.0.0"])
      s.add_dependency(%q<rake>.freeze, [">= 0.9.0"])
      s.add_dependency(%q<rspec>.freeze, [">= 2.11.0"])
    end
  else
    s.add_dependency(%q<addressable>.freeze, [">= 2.3.1"])
    s.add_dependency(%q<extlib>.freeze, [">= 0.9.15"])
    s.add_dependency(%q<launchy>.freeze, [">= 2.1.1"])
    s.add_dependency(%q<multi_json>.freeze, [">= 1.0.0"])
    s.add_dependency(%q<rake>.freeze, [">= 0.9.0"])
    s.add_dependency(%q<rspec>.freeze, [">= 2.11.0"])
  end
end
autoparse-0.3.3/lib/0000755000175000017500000000000013121477516015563 5ustar balasankarcbalasankarcautoparse-0.3.3/lib/autoparse.rb0000644000175000017500000003060513121477516020117 0ustar balasankarcbalasankarc# Copyright 2010 Google Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
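# Usage sketch (illustrative): the flow below mirrors how the specs in
# spec/autoparse/instance_spec.rb exercise this library. The schema hash and
# the example URI are hypothetical stand-ins, not fixtures shipped with the
# gem.
#
#   schema_data = {
#     "type" => "object",
#     "properties" => {
#       "fullName" => { "type" => "string" }
#     }
#   }
#   person_class = AutoParse.generate(
#     schema_data,
#     :uri => Addressable::URI.parse("http://example.com/person#")
#   )
#   person = person_class.new("fullName" => "Bob Aman")
#   person.full_name  # => "Bob Aman"  (camelCase keys map to snake_case accessors)
#   person.valid?     # => true
#   person.to_hash    # => {"fullName" => "Bob Aman"}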
require 'autoparse/instance' require 'autoparse/version' require 'addressable/uri' require 'multi_json' require 'compat/multi_json' module AutoParse def self.schemas @schemas ||= {} end def self.generate(schema_data, options={}) uri = options[:uri] parent = options[:parent] if schema_data["extends"] super_uri = uri + Addressable::URI.parse(schema_data["extends"]) super_schema = self.schemas[super_uri] if super_schema == nil raise ArgumentError, "Could not find schema to extend: #{schema_data["extends"]} " + "Parent schema must be parsed before child schema." end else super_schema = Instance end schema = Class.new(super_schema) do @uri = Addressable::URI.parse(uri) @uri.normalize! if @uri != nil @schema_data = schema_data if !self.uri && parent @uri = parent.uri end def self.additional_properties_schema # Override the superclass implementation so we're not always returning # the empty schema. if @additional_properties_schema.data['$ref'] # Dereference the schema if necessary. @additional_properties_schema = @additional_properties_schema.dereference end return @additional_properties_schema end (@schema_data['properties'] || []).each do |(k, v)| property_key, property_schema = k, v property_name = INFLECTOR.underscore(property_key).gsub("-", "_") property_super_schema = super_schema.properties[property_key] if property_super_schema # TODO: Not sure if this should be a recursive merge or not... # TODO: Might need to raise an error if a schema is extended in # a way that violates the requirement that all child instances also # validate against the parent schema. property_schema = property_super_schema.data.merge(property_schema) end # If the schema has no ID, it inherits the ID from the parent schema. property_schema_class = AutoParse.generate(property_schema, :parent => self) self.properties[property_key] = property_schema_class self.keys[property_name] = property_key define_method(property_name) do __get__(property_name) end define_method(property_name + '=') do |value| __set__(property_name, value) end end if schema_data['additionalProperties'] == true || schema_data['additionalProperties'] == nil # Schema-less unknown properties are allowed. @additional_properties_schema = EMPTY_SCHEMA elsif schema_data['additionalProperties'] # Unknown properties follow the supplied schema. ap_schema = AutoParse.generate( schema_data['additionalProperties'], :parent => self ) @additional_properties_schema = ap_schema else @additional_properties_schema = nil end define_method('method_missing') do |method, *params, &block| # We need to convert from Ruby calling style to JavaScript calling # style. If this fails, attempt to use JavaScript calling style # directly. # We can't modify the method in-place because this affects the call # to super. stripped_method = method.to_s assignment = false if stripped_method[-1..-1] == '=' assignment = true stripped_method[-1..-1] = '' end key = INFLECTOR.camelize(stripped_method) key[0..0] = key[0..0].downcase if @data[key] != nil # Data found elsif @data[stripped_method] != nil # Data found key = stripped_method else # Method not found. super(method, *params, &block) end # If additionalProperties is simply set to true, no parsing takes # place and all values are treated as 'any'. if assignment new_value = params[0] self.__set__(key, new_value) else self.__get__(key) end end if schema_data['dependencies'] for dependency_key, dependency_data in schema_data['dependencies'] self.property_dependencies[dependency_key] = dependency_data end end end # Register the new schema. 
self.schemas[schema.uri] = schema unless parent && parent.uri == schema.uri return schema end def self.import(value, schema_class, type=nil) type = schema_class.data['type'] if type == nil case type when 'string' return AutoParse.import_string(value, schema_class) when 'boolean' return AutoParse.import_boolean(value, schema_class) when 'integer' return AutoParse.import_integer(value, schema_class) when 'number' return AutoParse.import_number(value, schema_class) when 'array' return AutoParse.import_array(value, schema_class) when 'object' return AutoParse.import_object(value, schema_class) when 'null' return nil when Array return AutoParse.import_union(value, schema_class) else return AutoParse.import_any(value, schema_class) end end def self.export(value, schema_class, type=nil) type = schema_class.data['type'] if type == nil case type when 'string' AutoParse.export_string(value, schema_class) when 'boolean' AutoParse.export_boolean(value, schema_class) when 'integer' AutoParse.export_integer(value, schema_class) when 'number' AutoParse.export_number(value, schema_class) when 'array' AutoParse.export_array(value, schema_class) when 'object' AutoParse.export_object(value, schema_class) when 'null' nil when Array AutoParse.export_union(value, schema_class) else AutoParse.export_any(value, schema_class) end end def self.import_string(value, schema_class) if value != nil format = schema_class.data['format'] if format == 'byte' Base64.decode64(value) elsif format == 'date-time' Time.parse(value) elsif format == 'url' Addressable::URI.parse(value) elsif format =~ /^u?int(32|64)$/ value.to_i else value end else nil end end def self.export_string(value, schema_class) format = schema_class.data['format'] if format == 'byte' Base64.encode64(value) elsif format == 'date-time' if value.respond_to?(:to_str) value = Time.parse(value.to_str) elsif !value.respond_to?(:xmlschema) raise TypeError, "Could not obtain RFC 3339 timestamp from #{value.class}." end value.xmlschema elsif format == 'url' # This effectively does limited URI validation. Addressable::URI.parse(value).to_str elsif format =~ /^u?int(32|64)$/ value.to_s elsif value.respond_to?(:to_str) value.to_str elsif value.kind_of?(Symbol) value.to_s else raise TypeError, "Expected String or Symbol, got #{value.class}." end end def self.import_boolean(value, schema_class) case value.to_s.downcase when 'true', 'yes', 'y', 'on', '1' true when 'false', 'no', 'n', 'off', '0' false when 'nil', 'null', 'undefined', '' nil else raise TypeError, "Expected boolean, got #{value.class}." end end def self.export_boolean(value, schema_class) case value.to_s.downcase when 'true', 'yes', 'y', 'on', '1' true when 'false', 'no', 'n', 'off', '0' false when 'nil', 'null', 'undefined', '' nil else raise TypeError, "Expected boolean, got #{value.class}." end end def self.import_number(value, schema_class) if value == nil value else Float(value) end end def self.export_number(value, schema_class) if value == nil value else Float(value) end end def self.import_integer(value, schema_class) if value == nil value else Integer(value) end end def self.export_integer(value, schema_class) if value == nil value else Integer(value) end end def self.import_array(value, schema_class) value = (if value != nil && !value.respond_to?(:to_ary) raise TypeError, "Expected Array, got #{value.class}." else (value || []).to_ary.dup end) items_schema = schema_class.property('items') value.map! 
do |item| AutoParse.import(item, items_schema) end value end def self.export_array(value, schema_class) if value == nil value elsif value.respond_to?(:to_ary) value = value.to_ary.dup items_schema = schema_class.property('items') value.map! do |item| AutoParse.export(item, items_schema) end value else raise TypeError, "Expected Array, got #{value.class}." end end def self.import_object(value, schema_class) value ? schema_class.new(value) : nil end def self.export_object(value, schema_class) # FIXME: Every field must be exported as well. if value.nil? nil elsif value.respond_to?(:to_hash) value.to_hash elsif value.respond_to?(:to_json) MultiJson.load(value.to_json) else raise TypeError, "Expected Hash, got #{value.class}." end end def self.import_union(value, schema_class) import_type = match_type( value, schema_class.data['type'], schema_class ) AutoParse.import(value, schema_class, import_type) end def self.export_union(value, schema_class) export_type = match_type( value, schema_class.data['type'], schema_class ) AutoParse.export(value, schema_class, export_type) end def self.import_any(value, schema_class) value end def self.export_any(value, schema_class) value end ## # Given a value and a union of types, selects the type which is the best # match for the given value. More than one type may match the value, in which # case, the first type in the union will be returned. def self.match_type(value, union, parent=nil) possible_types = [union].flatten.compact # Strict pass for type in possible_types # We import as the first type in the list that validates. case type when 'string' return 'string' if value.kind_of?(String) when 'boolean' return 'boolean' if value == true or value == false when 'integer' return 'integer' if value.kind_of?(Integer) when 'number' return 'number' if value.kind_of?(Numeric) when 'array' return 'array' if value.kind_of?(Array) when 'object' return 'object' if value.kind_of?(Hash) || value.kind_of?(Instance) when 'null' return 'null' if value.nil? when Hash # Schema embedded directly. schema_class = AutoParse.generate(type, :parent => parent) if type['$ref'] schema_class = schema_class.dereference end return schema_class if schema_class.new(value).valid? end end # Lenient pass for type in possible_types # We import as the first type in the list that validates. case type when 'string' return 'string' if value.respond_to?(:to_str) || value.kind_of?(Symbol) when 'boolean' if ['true', 'yes', 'y', 'on', '1', 'false', 'no', 'n', 'off', '0'].include?(value.to_s.downcase) return 'boolean' end when 'integer' return 'integer' if value.to_i != 0 || value == "0" when 'number' return 'number' if value.to_f != 0.0 || value == "0" || value == "0.0" when 'array' return 'array' if value.respond_to?(:to_ary) when 'object' if value.respond_to?(:to_hash) || value.respond_to?(:to_json) return 'object' end when 'any' return 'any' end end return nil end end autoparse-0.3.3/lib/compat/0000755000175000017500000000000013121477516017046 5ustar balasankarcbalasankarcautoparse-0.3.3/lib/compat/multi_json.rb0000644000175000017500000000043313121477516021556 0ustar balasankarcbalasankarcgem 'multi_json', '>= 1.0.0' require 'multi_json' unless MultiJson.respond_to?(:load) module MultiJson class < self) else schema_class = self.additional_properties_schema end end if schema_class.data['$ref'] # Dereference the schema if necessary. schema_class = schema_class.dereference # Avoid this dereference in the future. 
self.properties[property_key] = schema_class end return schema_class end def self.keys return @keys ||= ( if self.superclass.ancestors.include?(::AutoParse::Instance) self.superclass.keys.dup else {} end ) end def self.additional_properties_schema return EMPTY_SCHEMA end def self.property_dependencies return @property_dependencies ||= {} end def self.data return @schema_data ||= {} end def self.description return self.data['description'] end def self.validate_string_property(property_value, schema_data) property_value = property_value.to_str rescue property_value if !property_value.kind_of?(String) return false else # TODO: implement more than type-checking return true end end def self.validate_boolean_property(property_value, schema_data) return false if property_value != true && property_value != false # TODO: implement more than type-checking return true end def self.validate_integer_property(property_value, schema_data) return false if !property_value.kind_of?(Integer) if schema_data['minimum'] && schema_data['exclusiveMinimum'] return false if property_value <= schema_data['minimum'] elsif schema_data['minimum'] return false if property_value < schema_data['minimum'] end if schema_data['maximum'] && schema_data['exclusiveMaximum'] return false if property_value >= schema_data['maximum'] elsif schema_data['maximum'] return false if property_value > schema_data['maximum'] end return true end def self.validate_number_property(property_value, schema_data) return false if !property_value.kind_of?(Numeric) # TODO: implement more than type-checking return true end def self.validate_array_property(property_value, schema_data) if property_value.respond_to?(:to_ary) property_value = property_value.to_ary else return false end property_value.each do |item_value| unless self.validate_property_value(item_value, schema_data['items']) return false end end return true end def self.validate_object_property(property_value, schema_data) if property_value.kind_of?(Instance) return property_value.valid? else # This is highly ineffecient, but currently hard to avoid given the # schema is anonymous, making lookups very difficult. schema = AutoParse.generate(schema_data, :parent => self) begin return schema.new(property_value).valid? rescue TypeError, ArgumentError, ::JSON::ParserError return false end end end def self.validate_union_property(property_value, schema_data) union = schema_data['type'] possible_types = [union].flatten.compact for type in possible_types case type when 'string' return true if self.validate_string_property( property_value, schema_data ) when 'boolean' return true if self.validate_boolean_property( property_value, schema_data ) when 'integer' return true if self.validate_integer_property( property_value, schema_data ) when 'number' return true if self.validate_number_property( property_value, schema_data ) when 'array' return true if self.validate_array_property( property_value, schema_data ) when 'object' return true if self.validate_object_property( property_value, schema_data ) when 'null' return true if property_value.nil? when 'any' return true end end # None of the union types validated. # An empty union will fail to validate anything. return false end ## # @api private def self.validate_property_value(property_value, schema_data) if property_value == nil && schema_data['required'] == true return false elsif property_value == nil # Value was omitted, but not required. Still valid. 
return true end # Verify property values if schema_data['$ref'] if self.uri schema_uri = self.uri + Addressable::URI.parse(schema_data['$ref']) else schema_uri = Addressable::URI.parse(schema_data['$ref']) end schema = AutoParse.schemas[schema_uri] if schema == nil raise ArgumentError, "Could not find schema: #{schema_data['$ref']}. " + "Referenced schema must be parsed first." end schema_data = schema.data end case schema_data['type'] when 'string' return false unless self.validate_string_property( property_value, schema_data ) when 'boolean' return false unless self.validate_boolean_property( property_value, schema_data ) when 'integer' return false unless self.validate_integer_property( property_value, schema_data ) when 'number' return false unless self.validate_number_property( property_value, schema_data ) when 'array' return false unless self.validate_array_property( property_value, schema_data ) when 'object' return false unless self.validate_object_property( property_value, schema_data ) when 'null' return false unless property_value.nil? when Array return false unless self.validate_union_property( property_value, schema_data ) else # Either type 'any' or we don't know what this is, # default to anything goes. Validation of an 'any' property always # succeeds. end return true end def initialize(data={}) if (self.class.data || {})['type'] == nil # Type is omitted, default value is any. else type_set = [(self.class.data || {})['type']].flatten.compact if !type_set.include?('object') raise TypeError, "Only schemas of type 'object' are instantiable:\n" + "#{self.class.data.inspect}" end end if data.respond_to?(:to_hash) data = data.to_hash elsif data.respond_to?(:to_json) data = JSON.parse(data.to_json) else raise TypeError, 'Unable to parse. ' + 'Expected data to respond to either :to_hash or :to_json.' end if data['$ref'] raise TypeError, "Cannot instantiate a reference schema. Must be dereferenced first." end @data = data end def method_missing(method, *params, &block) schema_data = self.class.data unless schema_data['additionalProperties'] # Do nothing special if additionalProperties is not set. super else # We can't modify the method in-place because this affects the call # to super. property_name = method.to_s assignment = false # Property names simply identify the property and thus don't # include the assignment operator. if property_name[-1..-1] == '=' assignment = true property_name[-1..-1] = '' end property_key = self.class.keys[property_name] property_schema = self.class.properties[property_key] # TODO: Properly support additionalProperties. if property_key == nil || property_schema == nil # Method not found. return super end # If additionalProperties is simply set to true, no parsing takes # place and all values are treated as 'any'. 
if assignment new_value = params[0] __set__(property_name, new_value) else __get__(property_name) end end end def __get__(property_name) property_key = self.class.keys[property_name] || property_name schema_class = self.class.property(property_name) if !schema_class @data[property_key] else if @data.has_key?(property_key) value = @data[property_key] else value = schema_class.data['default'] end AutoParse.import(value, schema_class) end end protected :__get__ def __set__(property_name, value) property_key = self.class.keys[property_name] || property_name schema_class = self.class.property(property_name) if !schema_class @data[property_key] = value else @data[property_key] = AutoParse.export(value, schema_class) end end protected :__set__ def [](key, raw=false) if raw == true return @data[key] else return self.__get__(key) end end def []=(key, raw=false, value=:undefined) if value == :undefined # Due to the way Ruby handles default values in assignment methods, # we have to swap some values around here. raw, value = false, raw end if raw == true return @data[key] = value else return self.__set__(key, value) end end ## # Validates the parsed data against the schema. def valid? unvalidated_fields = @data.keys.dup for property_key, schema_class in self.class.properties property_value = @data[property_key] if !self.class.validate_property_value( property_value, schema_class.data) return false end if property_value == nil && schema_class.data['required'] != true # Value was omitted, but not required. Still valid. Skip dependency # checks. next end # Verify property dependencies property_dependencies = self.class.property_dependencies[property_key] case property_dependencies when String, Array property_dependencies = [property_dependencies].flatten for dependency_key in property_dependencies dependency_value = @data[dependency_key] return false if dependency_value == nil end when Class if property_dependencies.ancestors.include?(Instance) dependency_instance = property_dependencies.new(property_value) return false unless dependency_instance.valid? else raise TypeError, "Expected schema Class, got #{property_dependencies.class}." end end end if self.class.additional_properties_schema == nil # No additional properties allowed return false unless unvalidated_fields.empty? elsif self.class.additional_properties_schema != EMPTY_SCHEMA # Validate all remaining fields against this schema for property_key in unvalidated_fields property_value = @data[property_key] if !self.class.additional_properties_schema.validate_property_value( property_value, self.class.additional_properties_schema.data) return false end end end if self.class.superclass && self.class.superclass != Instance && self.class.ancestors.first != Instance # The spec actually only defined the 'extends' semantics as children # must also validate aainst the parent. return false unless self.class.superclass.new(@data).valid? end return true end def to_hash return @data end ## # Converts the instance value to JSON. # # @return [String] The instance value converted to JSON. # # @note # Ignores extra arguments to avoid throwing errors w/ certain JSON # libraries. def to_json(*args) return MultiJson.dump(self.to_hash) end ## # Returns a String representation of the schema instance. # # @return [String] The instance's state, as a String. def inspect sprintf( "#<%s:%#0x DATA:%s>", self.class.to_s, self.object_id, self.to_hash.inspect ) end end ## # The empty schema accepts all JSON. 
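  #
  # Illustrative sketch (hypothetical data): because this class defines no
  # properties and treats unknown fields as schema-less, any hash-like value
  # passes validation.
  #
  #   blob = AutoParse::EMPTY_SCHEMA.new("anything" => [1, 2, 3])
  #   blob.valid?   # => true
  #   blob.to_hash  # => {"anything" => [1, 2, 3]}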
EMPTY_SCHEMA = Instance end autoparse-0.3.3/lib/autoparse/version.rb0000644000175000017500000000153313121477516021602 0ustar balasankarcbalasankarc# Copyright 2010 Google Inc # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Used to prevent the class/module from being loaded more than once unless defined? AutoParse::VERSION module AutoParse module VERSION MAJOR = 0 MINOR = 3 TINY = 3 STRING = [MAJOR, MINOR, TINY].join('.') end end end autoparse-0.3.3/LICENSE0000644000175000017500000002514313121477516016027 0ustar balasankarcbalasankarc Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. 
For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. 
You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) 
The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. autoparse-0.3.3/tasks/0000755000175000017500000000000013121477516016142 5ustar balasankarcbalasankarcautoparse-0.3.3/tasks/rdoc.rake0000644000175000017500000000145313121477516017740 0ustar balasankarcbalasankarcrequire 'rubygems' begin # We prefer to use the RDoc gem over the site version. gem 'rdoc' rescue Gem::LoadError end unless defined?(RDoc) require 'rdoc/task' require 'rake/clean' CLOBBER.include('doc', 'ri') CLOBBER.uniq! namespace :doc do desc 'Generate RDoc documentation' Rake::RDocTask.new do |rdoc| rdoc.rdoc_dir = 'doc' rdoc.title = "#{PKG_NAME}-#{PKG_VERSION} Documentation" rdoc.options << '--line-numbers' << '--inline-source' << '--accessor' << 'cattr_accessor=object' << '--charset' << 'utf-8' rdoc.template = "#{ENV['template']}.rb" if ENV['template'] rdoc.rdoc_files.include('README.md', 'CHANGELOG.md', 'LICENSE') rdoc.rdoc_files.include('lib/**/*.rb') end desc 'Generate ri locally for testing' task :ri do sh 'rdoc --ri -o ri .' end end autoparse-0.3.3/tasks/git.rake0000644000175000017500000000242313121477516017572 0ustar balasankarcbalasankarcnamespace :git do namespace :tag do desc 'List tags from the Git repository' task :list do tags = `git tag -l` tags.gsub!("\r", '') tags = tags.split("\n").sort {|a, b| b <=> a } puts tags.join("\n") end desc 'Create a new tag in the Git repository' task :create do changelog = File.open('CHANGELOG.md', 'r') { |file| file.read } puts '-' * 80 puts changelog puts '-' * 80 puts v = ENV['VERSION'] or abort 'Must supply VERSION=x.y.z' abort "Versions don't match #{v} vs #{PKG_VERSION}" if v != PKG_VERSION git_status = `git status` if git_status !~ /nothing to commit \(working directory clean\)/ abort "Working directory isn't clean." end tag = "#{PKG_NAME}-#{PKG_VERSION}" msg = "Release #{PKG_NAME}-#{PKG_VERSION}" existing_tags = `git tag -l #{PKG_NAME}-*`.split('\n') if existing_tags.include?(tag) warn('Tag already exists, deleting...') unless system "git tag -d #{tag}" abort 'Tag deletion failed.' end end puts "Creating git tag '#{tag}'..." unless system "git tag -a -m \"#{msg}\" #{tag}" abort 'Tag creation failed.' end end end end task 'gem:release' => 'git:tag:create' autoparse-0.3.3/tasks/gem.rake0000644000175000017500000000431613121477516017562 0ustar balasankarcbalasankarcrequire 'rubygems/package_task' require 'rake/clean' CLOBBER.include('pkg') namespace :gem do GEM_SPEC = Gem::Specification.new do |s| unless s.respond_to?(:add_development_dependency) puts 'The gem spec requires a newer version of RubyGems.' 
exit(1) end s.name = PKG_NAME s.version = PKG_VERSION s.author = PKG_AUTHOR s.email = PKG_AUTHOR_EMAIL s.homepage = PKG_HOMEPAGE s.summary = PKG_SUMMARY s.description = PKG_DESCRIPTION s.rubyforge_project = RUBY_FORGE_PROJECT s.files = PKG_FILES.to_a s.has_rdoc = true s.extra_rdoc_files = %w( README.md ) s.rdoc_options.concat ['--main', 'README.md'] s.add_runtime_dependency('addressable', '>= 2.3.1') s.add_runtime_dependency('multi_json', '>= 1.0.0') s.add_runtime_dependency('extlib', '>= 0.9.15') s.add_development_dependency('rake', '>= 0.9.0') s.add_development_dependency('rspec', '>= 2.11.0') s.add_development_dependency('launchy', '>= 2.1.1') s.require_path = 'lib' end Gem::PackageTask.new(GEM_SPEC) do |p| p.gem_spec = GEM_SPEC p.need_tar = true p.need_zip = true end desc 'Show information about the gem' task :debug do puts GEM_SPEC.to_ruby end desc "Generates .gemspec file" task :gemspec do spec_string = GEM_SPEC.to_ruby begin Thread.new { eval("$SAFE = 3\n#{spec_string}", binding) }.join rescue abort "unsafe gemspec: #{$!}" else File.open("#{GEM_SPEC.name}.gemspec", 'w') do |file| file.write spec_string end end end desc 'Install the gem' task :install => ['clobber', 'gem:package'] do sh "#{SUDO} gem install --local pkg/#{GEM_SPEC.full_name}" end desc 'Uninstall the gem' task :uninstall do installed_list = Gem.source_index.find_name(PKG_NAME) if installed_list && (installed_list.collect { |s| s.version.to_s}.include?(PKG_VERSION)) sh( "#{SUDO} gem uninstall --version '#{PKG_VERSION}' " + "--ignore-dependencies --executables #{PKG_NAME}" ) end end desc 'Reinstall the gem' task :reinstall => [:uninstall, :install] end desc 'Alias to gem:package' task 'gem' => 'gem:package' task 'gem:release' => 'gem:gemspec' autoparse-0.3.3/tasks/yard.rake0000644000175000017500000000112213121477516017741 0ustar balasankarcbalasankarcrequire 'rake' begin require 'yard' require 'yard/rake/yardoc_task' namespace :doc do desc 'Generate Yardoc documentation' YARD::Rake::YardocTask.new do |yardoc| yardoc.name = 'yard' yardoc.options = ['--verbose'] yardoc.files = [ 'lib/**/*.rb', 'ext/**/*.c', 'README.md', 'CHANGELOG.md', 'LICENSE' ] end end task 'clobber' => ['doc:clobber_yard'] desc 'Alias to doc:yard' task 'doc' => 'doc:yard' rescue LoadError # If yard isn't available, it's not the end of the world desc 'Alias to doc:rdoc' task 'doc' => 'doc:rdoc' end autoparse-0.3.3/tasks/spec.rake0000644000175000017500000000345413121477516017746 0ustar balasankarcbalasankarcrequire 'spec/rake/verify_rcov' namespace :spec do Spec::Rake::SpecTask.new(:rcov) do |t| t.spec_files = FileList['spec/**/*_spec.rb'] t.spec_opts = ['--color', '--format', 'specdoc'] if RCOV_ENABLED if `which rcov`.strip == '' STDERR.puts 'Please install rcov:' STDERR.puts 'sudo gem install relevance-rcov' exit(1) end t.rcov = true else t.rcov = false end t.rcov_opts = [ '--exclude', 'spec', '--exclude', '\\.rvm\\/gems', '--exclude', '1\\.8\\/gems', '--exclude', '1\\.9\\/gems', '--exclude', '\\.rvm', '--exclude', '\\/Library\\/Ruby' ] end Spec::Rake::SpecTask.new(:normal) do |t| t.spec_files = FileList['spec/**/*_spec.rb'] t.spec_opts = ['--color', '--format', 'specdoc'] t.rcov = false end if RCOV_ENABLED RCov::VerifyTask.new(:verify) do |t| t.threshold = 100.0 t.index_html = 'coverage/index.html' end task :verify => :rcov end desc 'Generate HTML Specdocs for all specs' Spec::Rake::SpecTask.new(:specdoc) do |t| specdoc_path = File.expand_path( File.join(File.dirname(__FILE__), '../specdoc/')) Dir.mkdir(specdoc_path) if !File.exist?(specdoc_path) 
output_file = File.join(specdoc_path, 'index.html') t.spec_files = FileList['spec/**/*_spec.rb'] t.spec_opts = ['--format', "\"html:#{output_file}\"", '--diff'] t.fail_on_error = false end namespace :rcov do desc 'Browse the code coverage report.' task :browse => 'spec:rcov' do require 'launchy' Launchy::Browser.run('coverage/index.html') end end end if RCOV_ENABLED desc 'Alias to spec:verify' task 'spec' => 'spec:verify' else desc 'Alias to spec:normal' task 'spec' => 'spec:normal' end task 'clobber' => ['spec:clobber_rcov'] autoparse-0.3.3/tasks/clobber.rake0000644000175000017500000000006013121477516020412 0ustar balasankarcbalasankarcdesc 'Remove all build products' task 'clobber' autoparse-0.3.3/tasks/metrics.rake0000644000175000017500000000113313121477516020452 0ustar balasankarcbalasankarcnamespace :metrics do task :lines do lines, codelines, total_lines, total_codelines = 0, 0, 0, 0 for file_name in FileList['lib/**/*.rb'] f = File.open(file_name) while line = f.gets lines += 1 next if line =~ /^\s*$/ next if line =~ /^\s*#/ codelines += 1 end puts "L: #{sprintf('%4d', lines)}, " + "LOC #{sprintf('%4d', codelines)} | #{file_name}" total_lines += lines total_codelines += codelines lines, codelines = 0, 0 end puts "Total: Lines #{total_lines}, LOC #{total_codelines}" end end