json-schema-0.19.1/0000755000175000017500000000000013374107723013376 5ustar boutilboutiljson-schema-0.19.1/json_schema.gemspec0000644000175000017500000000661713374107723017246 0ustar boutilboutil######################################################### # This file has been automatically generated by gem2tgz # ######################################################### # -*- encoding: utf-8 -*- # stub: json_schema 0.19.1 ruby lib Gem::Specification.new do |s| s.name = "json_schema".freeze s.version = "0.19.1" s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version= s.require_paths = ["lib".freeze] s.authors = ["Brandur".freeze] s.date = "2018-03-28" s.email = ["brandur@mutelight.org".freeze] s.executables = ["validate-schema".freeze] s.files = ["LICENSE".freeze, "README.md".freeze, "bin/validate-schema".freeze, "lib/commands/validate_schema.rb".freeze, "lib/json_pointer.rb".freeze, "lib/json_pointer/evaluator.rb".freeze, "lib/json_reference.rb".freeze, "lib/json_schema.rb".freeze, "lib/json_schema/attributes.rb".freeze, "lib/json_schema/configuration.rb".freeze, "lib/json_schema/document_store.rb".freeze, "lib/json_schema/error.rb".freeze, "lib/json_schema/parser.rb".freeze, "lib/json_schema/reference_expander.rb".freeze, "lib/json_schema/schema.rb".freeze, "lib/json_schema/validator.rb".freeze, "schemas/hyper-schema.json".freeze, "schemas/schema.json".freeze, "test/bin_test.rb".freeze, "test/commands/validate_schema_test.rb".freeze, "test/data_scaffold.rb".freeze, "test/json_pointer/evaluator_test.rb".freeze, "test/json_reference/reference_test.rb".freeze, "test/json_schema/attribute_test.rb".freeze, "test/json_schema/document_store_test.rb".freeze, "test/json_schema/error_test.rb".freeze, "test/json_schema/parser_test.rb".freeze, "test/json_schema/reference_expander_test.rb".freeze, "test/json_schema/schema_test.rb".freeze, "test/json_schema/validator_test.rb".freeze, "test/json_schema_test.rb".freeze, "test/test_helper.rb".freeze] s.homepage = "https://github.com/brandur/json_schema".freeze s.licenses = ["MIT".freeze] s.rubygems_version = "2.7.6".freeze s.summary = "A JSON Schema V4 and Hyperschema V4 parser and validator.".freeze if s.respond_to? 
:specification_version then s.specification_version = 4 if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then s.add_development_dependency(%q.freeze, [">= 0"]) s.add_development_dependency(%q.freeze, ["~> 0.1"]) s.add_development_dependency(%q.freeze, ["~> 5.3"]) s.add_development_dependency(%q.freeze, [">= 0"]) s.add_development_dependency(%q.freeze, [">= 0"]) s.add_development_dependency(%q.freeze, ["~> 10.3"]) s.add_development_dependency(%q.freeze, [">= 0"]) else s.add_dependency(%q.freeze, [">= 0"]) s.add_dependency(%q.freeze, ["~> 0.1"]) s.add_dependency(%q.freeze, ["~> 5.3"]) s.add_dependency(%q.freeze, [">= 0"]) s.add_dependency(%q.freeze, [">= 0"]) s.add_dependency(%q.freeze, ["~> 10.3"]) s.add_dependency(%q.freeze, [">= 0"]) end else s.add_dependency(%q.freeze, [">= 0"]) s.add_dependency(%q.freeze, ["~> 0.1"]) s.add_dependency(%q.freeze, ["~> 5.3"]) s.add_dependency(%q.freeze, [">= 0"]) s.add_dependency(%q.freeze, [">= 0"]) s.add_dependency(%q.freeze, ["~> 10.3"]) s.add_dependency(%q.freeze, [">= 0"]) end end json-schema-0.19.1/schemas/0000755000175000017500000000000013374107723015021 5ustar boutilboutiljson-schema-0.19.1/schemas/schema.json0000644000175000017500000001042713374107723017160 0ustar boutilboutil{ "id": "http://json-schema.org/draft-04/schema#", "$schema": "http://json-schema.org/draft-04/schema#", "description": "Core schema meta-schema", "definitions": { "schemaArray": { "type": "array", "minItems": 1, "items": { "$ref": "#" } }, "positiveInteger": { "type": "integer", "minimum": 0 }, "positiveIntegerDefault0": { "allOf": [ { "$ref": "#/definitions/positiveInteger" }, { "default": 0 } ] }, "simpleTypes": { "enum": [ "array", "boolean", "integer", "null", "number", "object", "string" ] }, "stringArray": { "type": "array", "items": { "type": "string" }, "minItems": 1, "uniqueItems": true } }, "type": "object", "properties": { "id": { "type": "string", "format": "uri" }, "$schema": { "type": "string", "format": "uri" }, "title": { "type": "string" }, "description": { "type": "string" }, "default": {}, "multipleOf": { "type": "number", "minimum": 0, "exclusiveMinimum": true }, "maximum": { "type": "number" }, "exclusiveMaximum": { "type": "boolean", "default": false }, "minimum": { "type": "number" }, "exclusiveMinimum": { "type": "boolean", "default": false }, "maxLength": { "$ref": "#/definitions/positiveInteger" }, "minLength": { "$ref": "#/definitions/positiveIntegerDefault0" }, "pattern": { "type": "string", "format": "regex" }, "additionalItems": { "anyOf": [ { "type": "boolean" }, { "$ref": "#" } ], "default": {} }, "items": { "anyOf": [ { "$ref": "#" }, { "$ref": "#/definitions/schemaArray" } ], "default": {} }, "maxItems": { "$ref": "#/definitions/positiveInteger" }, "minItems": { "$ref": "#/definitions/positiveIntegerDefault0" }, "uniqueItems": { "type": "boolean", "default": false }, "maxProperties": { "$ref": "#/definitions/positiveInteger" }, "minProperties": { "$ref": "#/definitions/positiveIntegerDefault0" }, "required": { "$ref": "#/definitions/stringArray" }, "additionalProperties": { "anyOf": [ { "type": "boolean" }, { "$ref": "#" } ], "default": {} }, "definitions": { "type": "object", "additionalProperties": { "$ref": "#" }, "default": {} }, "properties": { "type": "object", "additionalProperties": { "$ref": "#" }, "default": {} }, "patternProperties": { "type": "object", "additionalProperties": { "$ref": "#" }, "default": {} }, "dependencies": { "type": "object", "additionalProperties": { "anyOf": [ { "$ref": "#" }, { "$ref": 
"#/definitions/stringArray" } ] } }, "enum": { "type": "array", "minItems": 1, "uniqueItems": true }, "type": { "anyOf": [ { "$ref": "#/definitions/simpleTypes" }, { "type": "array", "items": { "$ref": "#/definitions/simpleTypes" }, "minItems": 1, "uniqueItems": true } ] }, "allOf": { "$ref": "#/definitions/schemaArray" }, "anyOf": { "$ref": "#/definitions/schemaArray" }, "oneOf": { "$ref": "#/definitions/schemaArray" }, "not": { "$ref": "#" } }, "dependencies": { "exclusiveMaximum": [ "maximum" ], "exclusiveMinimum": [ "minimum" ] }, "default": {} } json-schema-0.19.1/schemas/hyper-schema.json0000644000175000017500000001132013374107723020276 0ustar boutilboutil{ "$schema": "http://json-schema.org/draft-04/hyper-schema#", "id": "http://json-schema.org/draft-04/hyper-schema#", "title": "JSON Hyper-Schema", "allOf": [ { "$ref": "http://json-schema.org/draft-04/schema#" } ], "properties": { "additionalItems": { "anyOf": [ { "type": "boolean" }, { "$ref": "#" } ] }, "additionalProperties": { "anyOf": [ { "type": "boolean" }, { "$ref": "#" } ] }, "dependencies": { "additionalProperties": { "anyOf": [ { "$ref": "#" }, { "type": "array" } ] } }, "items": { "anyOf": [ { "$ref": "#" }, { "$ref": "#/definitions/schemaArray" } ] }, "definitions": { "additionalProperties": { "$ref": "#" } }, "patternProperties": { "additionalProperties": { "$ref": "#" } }, "properties": { "additionalProperties": { "$ref": "#" } }, "allOf": { "$ref": "#/definitions/schemaArray" }, "anyOf": { "$ref": "#/definitions/schemaArray" }, "oneOf": { "$ref": "#/definitions/schemaArray" }, "not": { "$ref": "#" }, "links": { "type": "array", "items": { "$ref": "#/definitions/linkDescription" } }, "fragmentResolution": { "type": "string" }, "media": { "type": "object", "properties": { "type": { "description": "A media type, as described in RFC 2046", "type": "string" }, "binaryEncoding": { "description": "A content encoding scheme, as described in RFC 2045", "type": "string" } } }, "pathStart": { "description": "Instances' URIs must start with this value for this schema to apply to them", "type": "string", "format": "uri" } }, "definitions": { "schemaArray": { "type": "array", "items": { "$ref": "#" } }, "linkDescription": { "title": "Link Description Object", "type": "object", "required": [ "href", "rel" ], "properties": { "href": { "description": "a URI template, as defined by RFC 6570, with the addition of the $, ( and ) characters for pre-processing", "type": "string" }, "rel": { "description": "relation to the target resource of the link", "type": "string" }, "title": { "description": "a title for the link", "type": "string" }, "targetSchema": { "description": "JSON Schema describing the link target", "$ref": "#" }, "mediaType": { "description": "media type (as defined by RFC 2046) describing the link target", "type": "string" }, "method": { "description": "method for requesting the target of the link (e.g. for HTTP this might be \"GET\" or \"DELETE\")", "type": "string" }, "encType": { "description": "The media type in which to submit data along with the request", "type": "string", "default": "application/json" }, "schema": { "description": "Schema describing the data to submit along with the request", "$ref": "#" } } } }, "links": [ { "rel": "self", "href": "{+id}" }, { "rel": "full", "href": "{+($ref)}" } ] } json-schema-0.19.1/README.md0000644000175000017500000000366013374107723014662 0ustar boutilboutil# json_schema A JSON Schema V4 and Hyperschema V4 parser and validator. 
Validate some data based on a JSON Schema: ``` gem install json_schema validate-schema schema.json data.json ``` ## Programmatic ``` ruby require "json" require "json_schema" # parse the schema - raise SchemaError if it's invalid schema_data = JSON.parse(File.read("schema.json")) schema = JsonSchema.parse!(schema_data) # expand $ref nodes - raise SchemaError if unable to resolve schema.expand_references! # validate some data - raise ValidationError if it doesn't conform data = JSON.parse(File.read("data.json")) schema.validate!(data) # iterate through hyperschema links schema.links.each do |link| puts "#{link.method} #{link.href}" end # abort on first error, instead of listing them all: schema.validate!(data, fail_fast: true) ``` Errors have a `message` (for humans), and `type` (for machines). `ValidationError`s also include a `path`, a JSON pointer to the location in the supplied document which violated the schema. See [errors](docs/errors.md) for more info. Non-bang methods return a two-element array, with `true`/`false` at index 0 to indicate pass/fail, and an array of errors at index 1 (if any). Passing `fail_fast: true` (default: `false`) will cause the validator to abort on the first error it encounters and report just that. Even on fully valid data this can offer some speed improvement, since it doesn't have to collect error messages that might be later discarded (think of e.g. the `anyOf` directive). ## Development Run the test suite with: ``` rake ``` Or run specific suites or tests with: ``` ruby -Ilib -Itest test/json_schema/validator_test.rb ruby -Ilib -Itest test/json_schema/validator_test.rb -n /anyOf/ ``` ## Release 1. Update the version in `json_schema.gemspec` as appropriate for [semantic versioning](http://semver.org) and add details to `CHANGELOG`. 2. Run the `release` task: ``` bundle exec rake release ``` json-schema-0.19.1/lib/0000755000175000017500000000000013374107723014144 5ustar boutilboutiljson-schema-0.19.1/lib/json_pointer/0000755000175000017500000000000013374107723016655 5ustar boutilboutiljson-schema-0.19.1/lib/json_pointer/evaluator.rb0000644000175000017500000000423213374107723021205 0ustar boutilboutilmodule JsonPointer # Evaluates a JSON pointer within a JSON document. # # Note that this class is designed to evaluate references across a plain JSON # data object _or_ an instance of `JsonSchema::Schema`, so the constructor's # `data` argument can be of either type. class Evaluator def initialize(data) @data = data end def evaluate(original_path) path = original_path # the leading # can either be included or not path = path[1..-1] if path[0] == "#" # special case on "" or presumably "#" if path.empty? return @data end if path[0] != "/" raise ArgumentError, %{Path must begin with a leading "/": #{original_path}.} end path_parts = split(path) evaluate_segment(@data, path_parts) end private def evaluate_segment(data, path_parts) if path_parts.empty? 
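        # every pointer segment has been consumed, so the node we are
        # currently sitting on is the evaluation result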
data elsif data == nil # spec doesn't define how to handle this, so we'll return `nil` nil else key = transform_key(path_parts.shift) if data.is_a?(Array) unless key =~ /^\d+$/ raise ArgumentError, %{Key operating on an array must be a digit or "-": #{key}.} end evaluate_segment(data[key.to_i], path_parts) else evaluate_segment(data[key], path_parts) end end end # custom split method to account for blank segments def split(path) parts = [] last_index = 0 while index = path.index("/", last_index) if index == last_index parts << "" else parts << path[last_index...index] end last_index = index + 1 end # and also get that last segment parts << path[last_index..-1] # it should begin with a blank segment from the leading "/"; kill that parts.shift parts end def transform_key(key) # ~ has special meaning to JSON pointer to allow keys containing "/", so # perform some transformations first as defined by the spec # first as defined by the spec key = key.gsub('~1', '/') key = key.gsub('~0', '~') key end end end json-schema-0.19.1/lib/json_schema/0000755000175000017500000000000013374107723016435 5ustar boutilboutiljson-schema-0.19.1/lib/json_schema/document_store.rb0000644000175000017500000000141113374107723022011 0ustar boutilboutilmodule JsonSchema # The document store helps resolve URI-based JSON pointers by storing IDs # that we've seen in the schema. # # Each URI tuple also contains a pointer map that helps speed up expansions # that have already happened and handles cyclic dependencies. Store a # reference to the top-level schema before doing anything else. class DocumentStore include Enumerable def initialize @schema_map = {} end def add_schema(schema) raise ArgumentError, "can't add nil URI" if schema.uri.nil? uri = schema.uri.chomp('#') @schema_map[uri] = schema end def each @schema_map.each { |k, v| yield(k, v) } end def lookup_schema(uri) uri = uri.chomp('#') @schema_map[uri] end end end json-schema-0.19.1/lib/json_schema/parser.rb0000644000175000017500000003154313374107723020264 0ustar boutilboutilrequire_relative "../json_reference" require_relative "validator" module JsonSchema class Parser ALLOWED_TYPES = %w{any array boolean integer number null object string} BOOLEAN = [FalseClass, TrueClass] FORMATS = JsonSchema::Validator::DEFAULT_FORMAT_VALIDATORS.keys FRIENDLY_TYPES = { Array => "array", FalseClass => "boolean", Float => "number", Hash => "object", Integer => "integer", NilClass => "null", String => "string", TrueClass => "boolean", } attr_accessor :errors # Basic parsing of a schema. May return a malformed schema! (Use `#parse!` # to raise errors instead). def parse(data, parent = nil) # while #parse_data is recursed into for many schemas over the same # object, the @errors array is an instance-wide accumulator @errors = [] schema = parse_data(data, parent, "#") if @errors.count == 0 schema else nil end end def parse!(data, parent = nil) schema = parse(data, parent) if !schema raise AggregateError.new(@errors) end schema end private def build_uri(id, parent_uri) # kill any trailing slashes if id # may look like: http://json-schema.org/draft-04/hyper-schema# uri = URI.parse(id) # make sure there is no `#` suffix uri.fragment = nil # if id is defined as absolute, the schema's URI stays absolute if uri.absolute? 
|| uri.path[0] == "/" uri.to_s.chomp("/") # otherwise build it according to the parent's URI elsif parent_uri # make sure we don't end up with duplicate slashes parent_uri = parent_uri.chomp("/") parent_uri + "/" + id else "/" end # if id is missing, it's defined as its parent schema's URI elsif parent_uri parent_uri else "/" end end def parse_additional_items(schema) if schema.additional_items # an object indicates a schema that will be used to parse any # items not listed in `items` if schema.additional_items.is_a?(Hash) schema.additional_items = parse_data( schema.additional_items, schema, "additionalItems" ) end # otherwise, leave as boolean end end def parse_additional_properties(schema) if schema.additional_properties # an object indicates a schema that will be used to parse any # properties not listed in `properties` if schema.additional_properties.is_a?(Hash) schema.additional_properties = parse_data( schema.additional_properties, schema, "additionalProperties" ) end # otherwise, leave as boolean end end def parse_all_of(schema) if schema.all_of schema.all_of = schema.all_of.each_with_index. map { |s, i| parse_data(s, schema, "allOf/#{i}") } end end def parse_any_of(schema) if schema.any_of schema.any_of = schema.any_of.each_with_index. map { |s, i| parse_data(s, schema, "anyOf/#{i}") } end end def parse_one_of(schema) if schema.one_of schema.one_of = schema.one_of.each_with_index. map { |s, i| parse_data(s, schema, "oneOf/#{i}") } end end def parse_data(data, parent, fragment) if !data.is_a?(Hash) # it would be nice to make this message more specific/nicer (at best it # points to the wrong schema) message = %{#{data.inspect} is not a valid schema.} @errors << SchemaError.new(parent, message, :schema_not_found) elsif ref = data["$ref"] schema = Schema.new schema.fragment = fragment schema.parent = parent schema.reference = JsonReference::Reference.new(ref) else schema = parse_schema(data, parent, fragment) end schema end def parse_definitions(schema) if schema.definitions # leave the original data reference intact schema.definitions = schema.definitions.dup schema.definitions.each do |key, definition| subschema = parse_data(definition, schema, "definitions/#{key}") schema.definitions[key] = subschema end end end def parse_dependencies(schema) if schema.dependencies # leave the original data reference intact schema.dependencies = schema.dependencies.dup schema.dependencies.each do |k, s| # may be Array, String (simple dependencies), or Hash (schema # dependency) if s.is_a?(Hash) schema.dependencies[k] = parse_data(s, schema, "dependencies") elsif s.is_a?(String) # just normalize all simple dependencies to arrays schema.dependencies[k] = [s] end end end end def parse_items(schema) if schema.items # tuple validation: an array of schemas if schema.items.is_a?(Array) schema.items = schema.items.each_with_index. map { |s, i| parse_data(s, schema, "items/#{i}") } # list validation: a single schema else schema.items = parse_data(schema.items, schema, "items") end end end def parse_links(schema) if schema.links schema.links = schema.links.each_with_index.map { |l, i| link = Schema::Link.new link.parent = schema link.fragment = "links/#{i}" link.data = l # any parsed schema is automatically expanded link.expanded = true link.uri = nil link.description = l["description"] link.enc_type = l["encType"] link.href = l["href"] link.method = l["method"] ? 
l["method"].downcase.to_sym : nil link.rel = l["rel"] link.title = l["title"] link.media_type = l["mediaType"] if l["schema"] link.schema = parse_data(l["schema"], schema, "links/#{i}/schema") end if l["targetSchema"] link.target_schema = parse_data(l["targetSchema"], schema, "links/#{i}/targetSchema") end link } end end def parse_media(schema) if data = schema.media schema.media = Schema::Media.new schema.media.binary_encoding = data["binaryEncoding"] schema.media.type = data["type"] end end def parse_not(schema) if schema.not schema.not = parse_data(schema.not, schema, "not") end end def parse_pattern_properties(schema) if schema.pattern_properties # leave the original data reference intact properties = schema.pattern_properties.dup properties = properties.map do |k, s| [parse_regex(schema, k), parse_data(s, schema, "patternProperties/#{k}")] end schema.pattern_properties = Hash[*properties.flatten] end end def parse_regex(schema, regex) case JsonSchema.configuration.validate_regex_with when :'ecma-re-validator' unless EcmaReValidator.valid?(regex) message = %{#{regex.inspect} is not an ECMA-262 regular expression.} @errors << SchemaError.new(schema, message, :regex_failed) end end Regexp.new(regex) end def parse_properties(schema) # leave the original data reference intact schema.properties = schema.properties.dup if schema.properties && schema.properties.is_a?(Hash) schema.properties.each do |key, definition| subschema = parse_data(definition, schema, "properties/#{key}") schema.properties[key] = subschema end end end def parse_schema(data, parent, fragment) schema = Schema.new schema.fragment = fragment schema.parent = parent schema.data = data schema.id = validate_type(schema, [String], "id") # any parsed schema is automatically expanded schema.expanded = true # build URI early so we can reference it in errors schema.uri = build_uri(schema.id, parent ? 
parent.uri : nil) schema.title = validate_type(schema, [String], "title") schema.description = validate_type(schema, [String], "description") schema.default = schema.data["default"] # validation: any schema.all_of = validate_type(schema, [Array], "allOf") || [] schema.any_of = validate_type(schema, [Array], "anyOf") || [] schema.definitions = validate_type(schema, [Hash], "definitions") || {} schema.enum = validate_type(schema, [Array], "enum") schema.one_of = validate_type(schema, [Array], "oneOf") || [] schema.not = validate_type(schema, [Hash], "not") schema.type = validate_type(schema, [Array, String], "type") schema.type = [schema.type] if schema.type.is_a?(String) validate_known_type!(schema) # validation: array schema.additional_items = validate_type(schema, BOOLEAN + [Hash], "additionalItems") schema.items = validate_type(schema, [Array, Hash], "items") schema.max_items = validate_type(schema, [Integer], "maxItems") schema.min_items = validate_type(schema, [Integer], "minItems") schema.unique_items = validate_type(schema, BOOLEAN, "uniqueItems") # validation: number/integer schema.max = validate_type(schema, [Float, Integer], "maximum") schema.max_exclusive = validate_type(schema, BOOLEAN, "exclusiveMaximum") schema.min = validate_type(schema, [Float, Integer], "minimum") schema.min_exclusive = validate_type(schema, BOOLEAN, "exclusiveMinimum") schema.multiple_of = validate_type(schema, [Float, Integer], "multipleOf") # validation: object schema.additional_properties = validate_type(schema, BOOLEAN + [Hash], "additionalProperties") schema.dependencies = validate_type(schema, [Hash], "dependencies") || {} schema.max_properties = validate_type(schema, [Integer], "maxProperties") schema.min_properties = validate_type(schema, [Integer], "minProperties") schema.pattern_properties = validate_type(schema, [Hash], "patternProperties") || {} schema.properties = validate_type(schema, [Hash], "properties") || {} schema.required = validate_type(schema, [Array], "required") schema.strict_properties = validate_type(schema, BOOLEAN, "strictProperties") # validation: string schema.format = validate_type(schema, [String], "format") schema.max_length = validate_type(schema, [Integer], "maxLength") schema.min_length = validate_type(schema, [Integer], "minLength") schema.pattern = validate_type(schema, [String], "pattern") schema.pattern = parse_regex(schema, schema.pattern) if schema.pattern validate_format(schema, schema.format) if schema.format # hyperschema schema.links = validate_type(schema, [Array], "links") schema.media = validate_type(schema, [Hash], "media") schema.path_start = validate_type(schema, [String], "pathStart") schema.read_only = validate_type(schema, BOOLEAN, "readOnly") parse_additional_items(schema) parse_additional_properties(schema) parse_all_of(schema) parse_any_of(schema) parse_one_of(schema) parse_definitions(schema) parse_dependencies(schema) parse_items(schema) parse_links(schema) parse_media(schema) parse_not(schema) parse_pattern_properties(schema) parse_properties(schema) schema end def validate_known_type!(schema) if schema.type if !(bad_types = schema.type - ALLOWED_TYPES).empty? message = %{Unknown types: #{bad_types.sort.join(", ")}.} @errors << SchemaError.new(schema, message, :unknown_type) end end end def validate_type(schema, types, field) friendly_types = types.map { |t| FRIENDLY_TYPES[t] || t }.sort.uniq.join("/") value = schema.data[field] if !value.nil? && !types.any? 
{ |t| value.is_a?(t) } message = %{#{value.inspect} is not a valid "#{field}", must be a #{friendly_types}.} @errors << SchemaError.new(schema, message, :invalid_type) nil else value end end def validate_format(schema, format) valid_formats = FORMATS + JsonSchema.configuration.custom_formats.keys return if valid_formats.include?(format) message = %{#{format.inspect} is not a valid format, must be one of #{valid_formats.join(', ')}.} @errors << SchemaError.new(schema, message, :unknown_format) end end end json-schema-0.19.1/lib/json_schema/error.rb0000644000175000017500000000340013374107723020110 0ustar boutilboutilmodule JsonSchema class Error < RuntimeError end class AggregateError < Error attr_accessor :errors def initialize(errors) @errors = errors end def to_s @errors.join(" ") end end class SchemaError < Error attr_accessor :message, :schema, :type def self.aggregate(errors) errors.map(&:to_s) end def initialize(schema, message, type) @schema = schema @message = message @type = type end def to_s if schema && schema.pointer "#{schema.pointer}: #{message}" else message end end end class ValidationError < SchemaError attr_accessor :data, :path, :sub_errors def initialize(schema, path, message, type, options = {}) super(schema, message, type) @path = path # TODO: change to named optional arguments when Ruby 1.9 support is # removed @data = options[:data] @sub_errors = options[:sub_errors] end def pointer path.join("/") end def to_s "#{pointer}: failed schema #{schema.pointer}: #{message}" end end module ErrorFormatter def to_list(list) words_connector = ', ' two_words_connector = ' or ' last_word_connector = ', or ' length = list.length joined_list = case length when 1 list[0] when 2 "#{list[0]}#{two_words_connector}#{list[1]}" else "#{list[0...-1].join(words_connector)}#{last_word_connector}#{list[-1]}" end if joined_list[0] =~ /^[aeiou]/ "an #{joined_list}" else "a #{joined_list}" end end module_function :to_list end end json-schema-0.19.1/lib/json_schema/attributes.rb0000644000175000017500000001000013374107723021137 0ustar boutilboutilmodule JsonSchema # Attributes mixes in some useful attribute-related methods for use in # defining schema classes in a spirit similar to Ruby's attr_accessor and # friends. module Attributes # Provides class-level methods for the Attributes module. module ClassMethods # Attributes that should be copied between classes when invoking # Attributes#copy_from. # # Hash contains instance variable names mapped to a default value for the # field. attr_reader :copyable_attrs # Attributes that are part of the JSON schema and hyper-schema # specifications. These are allowed to be accessed with the [] operator. # # Hash contains the access key mapped to the name of the method that should # be invoked to retrieve a value. For example, `type` maps to `type` and # `additionalItems` maps to `additional_items`. attr_reader :schema_attrs # identical to attr_accessible, but allows us to copy in values from a # target schema to help preserve our hierarchy during reference expansion def attr_copyable(attr, options = {}) attr_accessor(attr) ref = :"@#{attr}" # Usually the default being assigned here is nil. self.copyable_attrs[ref] = options[:default] if default = options[:default] # remove the reader already created by attr_accessor remove_method(attr) need_dup = [Array, Hash, Set].include?(default.class) define_method(attr) do val = instance_variable_get(ref) if !val.nil? val else need_dup ? 
default.class.new : default end end end if options[:clear_cache] remove_method(:"#{attr}=") define_method(:"#{attr}=") do |value| instance_variable_set(options[:clear_cache], nil) instance_variable_set(ref, value) end end end def attr_schema(attr, options = {}) attr_copyable(attr, :default => options[:default], :clear_cache => options[:clear_cache]) self.schema_attrs[options[:schema_name] || attr] = attr end # Directive indicating that attributes should be inherited from a parent # class. # # Must appear as first statement in class that mixes in (or whose parent # mixes in) the Attributes module. def inherit_attrs @copyable_attrs = self.superclass.instance_variable_get(:@copyable_attrs).dup @schema_attrs = self.superclass.instance_variable_get(:@schema_attrs).dup end # Initializes some class instance variables required to make other # methods in the Attributes module work. Run automatically when the # module is mixed into another class. def initialize_attrs @copyable_attrs = {} @schema_attrs = {} end end def self.included(klass) klass.extend(ClassMethods) klass.send(:initialize_attrs) end # Allows the values of schema attributes to be accessed with a symbol or a # string. So for example, the value of `schema.additional_items` could be # procured with `schema[:additionalItems]`. This only works for attributes # that are part of the JSON schema specification; other methods on the # class are not available (e.g. `expanded`.) # # This is implemented so that `JsonPointer::Evaluator` can evaluate a # reference on an sintance of this class (as well as plain JSON data). def [](name) name = name.to_sym if self.class.schema_attrs.key?(name) send(self.class.schema_attrs[name]) else raise NoMethodError, "Schema does not respond to ##{name}" end end def copy_from(schema) self.class.copyable_attrs.each do |copyable, _| instance_variable_set(copyable, schema.instance_variable_get(copyable)) end end def initialize_attrs self.class.copyable_attrs.each do |attr, _| instance_variable_set(attr, nil) end end end end json-schema-0.19.1/lib/json_schema/reference_expander.rb0000644000175000017500000002150413374107723022610 0ustar boutilboutilrequire "set" module JsonSchema class ReferenceExpander attr_accessor :errors attr_accessor :store def expand(schema, options = {}) @errors = [] @local_store = DocumentStore.new @schema = schema @schema_paths = {} @store = options[:store] || DocumentStore.new # If the given JSON schema is _just_ a JSON reference and nothing else, # short circuit the whole expansion process and return the result. if schema.reference && !schema.expanded? return dereference(schema, []) end @uri = URI.parse(schema.uri) @store.each do |uri, store_schema| build_schema_paths(uri, store_schema) end # we run #to_s on lookup for URIs; the #to_s of nil is "" build_schema_paths("", schema) traverse_schema(schema) refs = unresolved_refs(schema).sort if refs.count > 0 message = %{Couldn't resolve references: #{refs.to_a.join(", ")}.} @errors << SchemaError.new(schema, message, :unresolved_references) end @errors.count == 0 end def expand!(schema, options = {}) if !expand(schema, options) raise AggregateError.new(@errors) end true end private def add_reference(schema) uri = URI.parse(schema.uri) # In case we've already added a schema for the same reference, don't # re-add it unless the new schema's pointer path is shorter than the one # we've already stored. stored_schema = lookup_reference(uri) if stored_schema && stored_schema.pointer.length < schema.pointer.length return end if uri.absolute? 
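        # schemas with an absolute URI go into the shared store (which may have
        # been passed in via options[:store]); everything else is kept in a
        # store local to this expansion run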
@store.add_schema(schema) else @local_store.add_schema(schema) end end def build_schema_paths(uri, schema) return if schema.reference paths = @schema_paths[uri] ||= {} paths[schema.pointer] = schema schema_children(schema).each do |subschema| build_schema_paths(uri, subschema) end # Also insert alternate tree for schema's custom URI. O(crazy). if schema.uri != uri fragment, parent = schema.fragment, schema.parent schema.fragment, schema.parent = "#", nil build_schema_paths(schema.uri, schema) schema.fragment, schema.parent = fragment, parent end end def dereference(ref_schema, ref_stack) ref = ref_schema.reference # detects a reference cycle if ref_stack.include?(ref) message = %{Reference loop detected: #{ref_stack.sort.join(", ")}.} @errors << SchemaError.new(ref_schema, message, :loop_detected) return false end new_schema = resolve_reference(ref_schema) return false unless new_schema # if the reference resolved to a new reference we need to continue # dereferencing until we either hit a non-reference schema, or a # reference which is already resolved if new_schema.reference && !new_schema.expanded? success = dereference(new_schema, ref_stack + [ref]) return false unless success end # copy new schema into existing one while preserving parent, fragment, # and reference parent = ref_schema.parent ref_schema.copy_from(new_schema) ref_schema.parent = parent # correct all parent references to point back to ref_schema instead of # new_schema if ref_schema.original? schema_children(ref_schema).each do |schema| schema.parent = ref_schema end end true end def lookup_pointer(uri, pointer) paths = @schema_paths[uri.to_s] ||= {} paths[pointer] end def lookup_reference(uri) if uri.absolute? @store.lookup_schema(uri.to_s) else @local_store.lookup_schema(uri.to_s) end end def resolve_pointer(ref_schema, resolved_schema) ref = ref_schema.reference if !(new_schema = lookup_pointer(ref.uri, ref.pointer)) new_schema = JsonPointer.evaluate(resolved_schema, ref.pointer) # couldn't resolve pointer within known schema; that's an error if new_schema.nil? message = %{Couldn't resolve pointer "#{ref.pointer}".} @errors << SchemaError.new(resolved_schema, message, :unresolved_pointer) return end # Try to aggressively detect a circular dependency in case of another # reference. See: # # https://github.com/brandur/json_schema/issues/50 # if new_schema.reference && new_new_schema = lookup_pointer(ref.uri, new_schema.reference.pointer) new_new_schema.clones << ref_schema else # Parse a new schema and use the same parent node. Basically this is # exclusively for the case of a reference that needs to be # de-referenced again to be resolved. build_schema_paths(ref.uri, resolved_schema) end else # insert a clone record so that the expander knows to expand it when # the schema traversal is finished new_schema.clones << ref_schema end new_schema end def resolve_reference(ref_schema) ref = ref_schema.reference uri = ref.uri if uri && uri.host scheme = uri.scheme || "http" # allow resolution if something we've already parsed has claimed the # full URL if @store.lookup_schema(uri.to_s) resolve_uri(ref_schema, uri) else message = %{Reference resolution over #{scheme} is not currently supported (URI: #{uri}).} @errors << SchemaError.new(ref_schema, message, :scheme_not_supported) nil end # absolute elsif uri && uri.path[0] == "/" resolve_uri(ref_schema, uri) # relative elsif uri # Build an absolute path using the URI of the current schema. 
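      # For example (illustrative values only): a referencing schema with URI
      # "/schemas/app" and a relative reference of "user#/definitions/name"
      # would be looked up as "/schemas/app/user", with the "#/definitions/name"
      # pointer then evaluated against whatever that lookup returns.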
# # Note that this code path will never currently be hit because the # incoming reference schema will never have a URI. if ref_schema.uri schema_uri = ref_schema.uri.chomp("/") resolve_uri(ref_schema, URI.parse(schema_uri + "/" + uri.path)) else nil end # just a JSON Pointer -- resolve against schema root else resolve_pointer(ref_schema, @schema) end end def resolve_uri(ref_schema, uri) if schema = lookup_reference(uri) resolve_pointer(ref_schema, schema) else message = %{Couldn't resolve URI: #{uri.to_s}.} @errors << SchemaError.new(ref_schema, message, :unresolved_pointer) nil end end def schema_children(schema) Enumerator.new do |yielder| schema.all_of.each { |s| yielder << s } schema.any_of.each { |s| yielder << s } schema.one_of.each { |s| yielder << s } schema.definitions.each { |_, s| yielder << s } schema.pattern_properties.each { |_, s| yielder << s } schema.properties.each { |_, s| yielder << s } if additional = schema.additional_properties if additional.is_a?(Schema) yielder << additional end end if schema.not yielder << schema.not end # can either be a single schema (list validation) or multiple (tuple # validation) if items = schema.items if items.is_a?(Array) items.each { |s| yielder << s } else yielder << items end end # dependencies can either be simple or "schema"; only replace the # latter schema.dependencies.values. select { |s| s.is_a?(Schema) }. each { |s| yielder << s } # schemas contained inside hyper-schema links objects if schema.links schema.links.map { |l| [l.schema, l.target_schema] }. flatten. compact. each { |s| yielder << s } end end end def unresolved_refs(schema) # prevent endless recursion return [] unless schema.original? schema_children(schema).reduce([]) do |arr, subschema| if !subschema.expanded? arr += [subschema.reference] else arr += unresolved_refs(subschema) end end end def traverse_schema(schema) add_reference(schema) schema_children(schema).each do |subschema| if subschema.reference && !subschema.expanded? dereference(subschema, []) end if !subschema.reference traverse_schema(subschema) end end # after finishing a schema traversal, find all clones and re-hydrate them if schema.original? schema.clones.each do |clone_schema| parent = clone_schema.parent clone_schema.copy_from(schema) clone_schema.parent = parent end end end end end json-schema-0.19.1/lib/json_schema/validator.rb0000644000175000017500000005311413374107723020753 0ustar boutilboutilrequire "uri" module JsonSchema class Validator attr_accessor :errors def initialize(schema) @schema = schema end def validate(data, fail_fast: false) @errors = [] @visits = {} @fail_fast = fail_fast # This dynamically creates the "strict_or_fast_and" method which is used # throughout the validator to combine the previous validation result with # another validation check. # Logic wise, we could simply define this method without meta programming # and decide every time to either call fast_and or strict_end. # Unfortunately this has a small overhead, that adds up over the runtime # of the validator – about 5% if we check @fail_fast everytime. # For more details, please see https://github.com/brandur/json_schema/pull/96 and_operation = method(@fail_fast ? 
:fast_and : :strict_and) define_singleton_method(:strict_or_fast_and, and_operation) catch(:fail_fast) do validate_data(@schema, data, @errors, ['#']) end @errors.size == 0 end def validate!(data, fail_fast: false) if !validate(data, fail_fast: fail_fast) raise AggregateError.new(@errors) end end private def first_visit(schema, errors, path) true # removed until more comprehensive testing can be performed .. this is # currently causing validation loop detections to go off on all non-trivial # schemas =begin key = "#{schema.object_id}-#{schema.pointer}-#{path.join("/")}" if !@visits.key?(key) @visits[key] = true true else message = %{Validation loop detected.} errors << ValidationError.new(schema, path, message, :loop_detected) false end =end end # for use with additionalProperties and strictProperties def get_extra_keys(schema, data) extra = data.keys - schema.properties.keys if schema.pattern_properties schema.pattern_properties.keys.each do |pattern| extra -= extra.select { |k| k =~ pattern } end end extra end # works around &&'s "lazy" behavior def strict_and(valid_old, valid_new) valid_old && valid_new end def fast_and(valid_old, valid_new) throw :fail_fast, false if !valid_new valid_old && valid_new end def validate_data(schema, data, errors, path) valid = true # detect a validation loop if !first_visit(schema, errors, path) return false end # validation: any valid = strict_or_fast_and valid, validate_all_of(schema, data, errors, path) valid = strict_or_fast_and valid, validate_any_of(schema, data, errors, path) valid = strict_or_fast_and valid, validate_enum(schema, data, errors, path) valid = strict_or_fast_and valid, validate_one_of(schema, data, errors, path) valid = strict_or_fast_and valid, validate_not(schema, data, errors, path) valid = strict_or_fast_and valid, validate_type(schema, data, errors, path) # validation: array if data.is_a?(Array) valid = strict_or_fast_and valid, validate_items(schema, data, errors, path) valid = strict_or_fast_and valid, validate_max_items(schema, data, errors, path) valid = strict_or_fast_and valid, validate_min_items(schema, data, errors, path) valid = strict_or_fast_and valid, validate_unique_items(schema, data, errors, path) end # validation: integer/number if data.is_a?(Float) || data.is_a?(Integer) valid = strict_or_fast_and valid, validate_max(schema, data, errors, path) valid = strict_or_fast_and valid, validate_min(schema, data, errors, path) valid = strict_or_fast_and valid, validate_multiple_of(schema, data, errors, path) end # validation: object if data.is_a?(Hash) valid = strict_or_fast_and valid, validate_additional_properties(schema, data, errors, path) valid = strict_or_fast_and valid, validate_dependencies(schema, data, errors, path) valid = strict_or_fast_and valid, validate_max_properties(schema, data, errors, path) valid = strict_or_fast_and valid, validate_min_properties(schema, data, errors, path) valid = strict_or_fast_and valid, validate_pattern_properties(schema, data, errors, path) valid = strict_or_fast_and valid, validate_properties(schema, data, errors, path) valid = strict_or_fast_and valid, validate_required(schema, data, errors, path, schema.required) valid = strict_or_fast_and valid, validate_strict_properties(schema, data, errors, path) end # validation: string if data.is_a?(String) valid = strict_or_fast_and valid, validate_format(schema, data, errors, path) valid = strict_or_fast_and valid, validate_max_length(schema, data, errors, path) valid = strict_or_fast_and valid, validate_min_length(schema, data, 
errors, path) valid = strict_or_fast_and valid, validate_pattern(schema, data, errors, path) end valid end def validate_additional_properties(schema, data, errors, path) return true if schema.additional_properties == true # schema indicates that all properties not in `properties` should be # validated according to subschema if schema.additional_properties.is_a?(Schema) extra = get_extra_keys(schema, data) validations = extra.map do |key| validate_data(schema.additional_properties, data[key], errors, path + [key]) end # true only if all keys validate validations.all? # boolean indicates whether additional properties are allowed else validate_extra(schema, data, errors, path) end end def validate_all_of(schema, data, errors, path) return true if schema.all_of.empty? # We've kept this feature behind a configuration flag for now because # there is some performance implication to producing each sub error. # Normally we can short circuit the validation after encountering only # one problem, but here we have to evaluate all subschemas every time. if JsonSchema.configuration.all_of_sub_errors && !@fail_fast sub_errors = [] valid = schema.all_of.map do |subschema| current_sub_errors = [] sub_errors << current_sub_errors validate_data(subschema, data, current_sub_errors, path) end.all? else sub_errors = nil valid = schema.all_of.all? do |subschema| validate_data(subschema, data, errors, path) end end message = %{Not all subschemas of "allOf" matched.} errors << ValidationError.new(schema, path, message, :all_of_failed, sub_errors: sub_errors, data: data) if !valid valid end def validate_any_of(schema, data, errors, path) return true if schema.any_of.empty? sub_errors = schema.any_of.map do |subschema| current_sub_errors = [] valid = catch(:fail_fast) do validate_data(subschema, data, current_sub_errors, path) end return true if valid current_sub_errors end message = %{No subschema in "anyOf" matched.} errors << ValidationError.new(schema, path, message, :any_of_failed, sub_errors: sub_errors, data: data) false end def validate_dependencies(schema, data, errors, path) return true if schema.dependencies.empty? result = schema.dependencies.map do |key, obj| # if the key is not present, the dependency is fulfilled by definition next true unless data[key] if obj.is_a?(Schema) validate_data(obj, data, errors, path) else # if not a schema, value is an array of required fields validate_required(schema, data, errors, path, obj) end end result.all? end def validate_format(schema, data, errors, path) return true unless schema.format validator = ( JsonSchema.configuration.custom_formats[schema.format] || DEFAULT_FORMAT_VALIDATORS[schema.format] ) if validator[data] true else message = %{#{data} is not a valid #{schema.format}.} errors << ValidationError.new(schema, path, message, :invalid_format, data: data) false end end def validate_enum(schema, data, errors, path) return true unless schema.enum if schema.enum.include?(data) true else message = %{#{data} is not a member of #{schema.enum}.} errors << ValidationError.new(schema, path, message, :invalid_type, data: data) false end end def validate_extra(schema, data, errors, path) extra = get_extra_keys(schema, data) if extra.empty? true else message = %{"#{extra.sort.join('", "')}" } + (extra.length == 1 ? "is not a" : "are not") + %{ permitted key} + (extra.length == 1 ? "." 
: "s.") errors << ValidationError.new(schema, path, message, :invalid_keys) false end end def validate_items(schema, data, errors, path) return true unless schema.items if schema.items.is_a?(Array) if data.size < schema.items.count message = %{#{schema.items.count} item} + (schema.items.count == 1 ? "" : "s") + %{ required; only #{data.size} } + (data.size == 1 ? "was" : "were") + %{ supplied.} errors << ValidationError.new(schema, path, message, :min_items_failed, data: data) false elsif data.size > schema.items.count && !schema.additional_items? message = %{No more than #{schema.items.count} item} + (schema.items.count == 1 ? " is" : "s are") + %{ allowed; #{data.size} } + (data.size > 1 ? "were" : "was") + %{ supplied.} errors << ValidationError.new(schema, path, message, :max_items_failed, data: data) false else valid = true if data.size > schema.items.count && schema.additional_items.is_a?(Schema) (schema.items.count..data.count - 1).each do |i| valid = strict_or_fast_and valid, validate_data(schema.additional_items, data[i], errors, path + [i]) end end schema.items.each_with_index do |subschema, i| valid = strict_or_fast_and valid, validate_data(subschema, data[i], errors, path + [i]) end valid end else valid = true data.each_with_index do |value, i| valid = strict_or_fast_and valid, validate_data(schema.items, value, errors, path + [i]) end valid end end def validate_max(schema, data, errors, path) return true unless schema.max if schema.max_exclusive? && data < schema.max true elsif !schema.max_exclusive? && data <= schema.max true else message = %{#{data} must be less than} + (schema.max_exclusive? ? "" : " or equal to") + %{ #{schema.max}.} errors << ValidationError.new(schema, path, message, :max_failed, data: data) false end end def validate_max_items(schema, data, errors, path) return true unless schema.max_items if data.size <= schema.max_items true else message = %{No more than #{schema.max_items} item} + (schema.max_items == 1 ? " is" : "s are") + %{ allowed; #{data.size} } + (data.size == 1 ? "was" : "were")+ %{ supplied.} errors << ValidationError.new(schema, path, message, :max_items_failed, data: data) false end end def validate_max_length(schema, data, errors, path) return true unless schema.max_length if data.length <= schema.max_length true else message = %{Only #{schema.max_length} character} + (schema.max_length == 1 ? " is" : "s are") + %{ allowed; #{data.length} } + (data.length == 1 ? "was" : "were") + %{ supplied.} errors << ValidationError.new(schema, path, message, :max_length_failed, data: data) false end end def validate_max_properties(schema, data, errors, path) return true unless schema.max_properties if data.keys.size <= schema.max_properties true else message = %{No more than #{schema.max_properties} propert} + (schema.max_properties == 1 ? "y is" : "ies are") + %{ allowed; #{data.keys.size} } + (data.keys.size == 1 ? "was" : "were") + %{ supplied.} errors << ValidationError.new(schema, path, message, :max_properties_failed, data: data) false end end def validate_min(schema, data, errors, path) return true unless schema.min if schema.min_exclusive? && data > schema.min true elsif !schema.min_exclusive? && data >= schema.min true else message = %{#{data} must be greater than} + (schema.min_exclusive? ? 
"" : " or equal to") + %{ #{schema.min}.} errors << ValidationError.new(schema, path, message, :min_failed, data: data) false end end def validate_min_items(schema, data, errors, path) return true unless schema.min_items if data.size >= schema.min_items true else message = %{#{schema.min_items} item} + (schema.min_items == 1 ? "" : "s") + %{ required; only #{data.size} } + (data.size == 1 ? "was" : "were") + %{ supplied.} errors << ValidationError.new(schema, path, message, :min_items_failed, data: data) false end end def validate_min_length(schema, data, errors, path) return true unless schema.min_length if data.length >= schema.min_length true else message = %{At least #{schema.min_length} character} + (schema.min_length == 1 ? " is" : "s are") + %{ required; only #{data.length} } + (data.length == 1 ? "was" : "were") + %{ supplied.} errors << ValidationError.new(schema, path, message, :min_length_failed, data: data) false end end def validate_min_properties(schema, data, errors, path) return true unless schema.min_properties if data.keys.size >= schema.min_properties true else message = %{At least #{schema.min_properties} propert}+ (schema.min_properties == 1 ? "y is" : "ies are") + %{ required; #{data.keys.size} }+ (data.keys.size == 1 ? "was" : "were") + %{ supplied.} errors << ValidationError.new(schema, path, message, :min_properties_failed, data: data) false end end def validate_multiple_of(schema, data, errors, path) return true unless schema.multiple_of if data % schema.multiple_of == 0 true else message = %{#{data} is not a multiple of #{schema.multiple_of}.} errors << ValidationError.new(schema, path, message, :multiple_of_failed, data: data) false end end def validate_one_of(schema, data, errors, path) return true if schema.one_of.empty? sub_errors = [] num_valid = schema.one_of.count do |subschema| current_sub_errors = [] valid = catch(:fail_fast) do validate_data(subschema, data, current_sub_errors, path) end sub_errors << current_sub_errors valid end return true if num_valid == 1 message = if num_valid == 0 %{No subschema in "oneOf" matched.} else %{More than one subschema in "oneOf" matched.} end errors << ValidationError.new(schema, path, message, :one_of_failed, sub_errors: sub_errors, data: data) false end def validate_not(schema, data, errors, path) return true unless schema.not # don't bother accumulating these errors, they'll all be worded # incorrectly for the inverse condition valid = !validate_data(schema.not, data, [], path) if !valid message = %{Matched "not" subschema.} errors << ValidationError.new(schema, path, message, :not_failed, data: data) end valid end def validate_pattern(schema, data, errors, path) return true unless schema.pattern if data =~ schema.pattern true else message = %{#{data} does not match #{schema.pattern.inspect}.} errors << ValidationError.new(schema, path, message, :pattern_failed, data: data) false end end def validate_pattern_properties(schema, data, errors, path) return true if schema.pattern_properties.empty? valid = true schema.pattern_properties.each do |pattern, subschema| data.each do |key, value| if key =~ pattern valid = strict_or_fast_and valid, validate_data(subschema, value, errors, path + [key]) end end end valid end def validate_properties(schema, data, errors, path) return true if schema.properties.empty? 
valid = true schema.properties.each do |key, subschema| next unless data.key?(key) valid = strict_or_fast_and valid, validate_data(subschema, data[key], errors, path + [key]) end valid end def validate_required(schema, data, errors, path, required) return true if !required || required.empty? if (missing = required - data.keys).empty? true else message = %{"#{missing.sort.join('", "')}" } + (missing.length == 1 ? "wasn't" : "weren't") + %{ supplied.} errors << ValidationError.new(schema, path, message, :required_failed, data: missing) false end end def validate_strict_properties(schema, data, errors, path) return true if !schema.strict_properties strict_or_fast_and validate_extra(schema, data, errors, path), validate_required(schema, data, errors, path, schema.properties.keys) end def validate_type(schema, data, errors, path) return true if !schema.type || schema.type.empty? if schema.type_parsed.any? { |t| data.is_a?(t) } true else key = find_parent(schema) message = %{For '#{key}', #{data.inspect} is not #{ErrorFormatter.to_list(schema.type)}.} errors << ValidationError.new(schema, path, message, :invalid_type, data: data) false end end def validate_unique_items(schema, data, errors, path) return true unless schema.unique_items? if data.size == data.uniq.size true else message = %{Duplicate items are not allowed.} errors << ValidationError.new(schema, path, message, :unique_items_failed, data: data) false end end def find_parent(schema) fragment = schema.fragment key = if fragment =~ /patternProperties/ split_pointer = schema.pointer.split("/") idx = split_pointer.index("patternProperties") # this join mimics the fragment format below in that it's # parent + key if idx - 2 >= 0 parts = split_pointer[(idx - 2)..(idx - 1)] end # protect against a `nil` that could occur if # `patternProperties` has no parent parts ? parts.compact.join("/") : nil end key || fragment end DEFAULT_FORMAT_VALIDATORS = { "date" => ->(data) { data =~ DATE_PATTERN }, "date-time" => ->(data) { data =~ DATE_TIME_PATTERN }, "email" => ->(data) { data =~ EMAIL_PATTERN }, "hostname" => ->(data) { data =~ HOSTNAME_PATTERN }, "ipv4" => ->(data) { data =~ IPV4_PATTERN }, "ipv6" => ->(data) { data =~ IPV6_PATTERN }, "regex" => ->(data) { Regexp.new(data) rescue false }, "uri" => ->(data) { URI.parse(data) rescue false }, # From the spec: a string instance is valid URI Reference (either a URI # or a relative-reference), according to RFC3986. # # URI.parse will a handle a relative reference as well as an absolute # one. Really though we should try to make "uri" more restrictive, and # both of these could do to be more robust. 
"uri-reference" => ->(data) { URI.parse(data) rescue false }, "uuid" => ->(data) { data =~ UUID_PATTERN }, }.freeze EMAIL_PATTERN = /^[A-Z0-9._%+-]+@[A-Z0-9.-]+\.[A-Z]+$/i HOSTNAME_PATTERN = /^(?=.{1,255}$)[0-9A-Za-z](?:(?:[0-9A-Za-z]|-){0,61}[0-9A-Za-z])?(?:\.[0-9A-Za-z](?:(?:[0-9A-Za-z]|-){0,61}[0-9A-Za-z])?)*\.?$/ DATE_PATTERN = /^[0-9]{4}-[0-9]{2}-[0-9]{2}$/ DATE_TIME_PATTERN = /^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-2][0-9]:[0-5][0-9]:[0-5][0-9](\.[0-9]+)?(Z|[\-+][0-9]{2}:[0-5][0-9])$/ # from: http://stackoverflow.com/a/17871737 IPV4_PATTERN = /^((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])$/ # from: http://stackoverflow.com/a/17871737 IPV6_PATTERN = /^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]).){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:)$/ UUID_PATTERN = /^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$/ end end json-schema-0.19.1/lib/json_schema/schema.rb0000644000175000017500000002132413374107723020224 0ustar boutilboutilrequire "json" module JsonSchema class Schema TYPE_MAP = { "array" => Array, "boolean" => [FalseClass, TrueClass], "integer" => Integer, "number" => [Integer, Float], "null" => NilClass, "object" => Hash, "string" => String, } include Attributes def initialize # nil out all our fields so that it's possible to instantiate a schema # instance without going through the parser and validate against it # without Ruby throwing warnings about uninitialized instance variables. initialize_attrs # Don't put this in as an attribute default. We require that this precise # pointer gets copied between all clones of any given schema so that they # all share exactly the same set. @clones = Set.new end # Fragment of a JSON Pointer that can help us build a pointer back to this # schema for debugging. attr_accessor :fragment # Rather than a normal schema, the node may be a JSON Reference. In this # case, no other attributes will be filled in except for #parent. attr_accessor :reference attr_copyable :expanded # A reference to the data which the Schema was initialized from. Used for # resolving JSON Pointer references. # # Type: Hash attr_copyable :data # # Relations # # Parent Schema object. Child may come from any of `definitions`, # `properties`, `anyOf`, etc. # # Type: Schema attr_copyable :parent # Collection of clones of this schema object, meaning all Schemas that were # initialized after the original. Used for JSON Reference expansion. The # only copy not present in this set is the original Schema object. # # Note that this doesn't have a default option because we rely on the fact # that the set is the *same object* between all clones of any given schema. # # Type: Set[Schema] attr_copyable :clones # The normalized URI of this schema. Note that child schemas inherit a URI # from their parent unless they have one explicitly defined, so this is # likely not a unique value in any given schema hierarchy. # # Type: String attr_copyable :uri # # Metadata # # Alters resolution scope. 
This value is used along with the parent scope's # URI to build a new address for this schema. Relative ID's will append to # the parent, and absolute URI's will replace it. # # Type: String attr_schema :id # Short title of the schema (or the hyper-schema link if this is one). # # Type: String attr_schema :title # More detailed description of the schema (or the hyper-schema link if this # is one). # # Type: String attr_schema :description # Default JSON value for this particular schema # # Type: [any] attr_schema :default # # Validation: Any # # A collection of subschemas of which data must validate against the full # set of to be valid. # # Type: Array[Schema] attr_schema :all_of, :default => [], :schema_name => :allOf # A collection of subschemas of which data must validate against any schema # in the set to be be valid. # # Type: Array[Schema] attr_schema :any_of, :default => [], :schema_name => :anyOf # A collection of inlined subschemas. Standard convention is to subschemas # here and reference them from elsewhere. # # Type: Hash[String => Schema] attr_schema :definitions, :default => {} # A collection of objects that must include the data for it to be valid. # # Type: Array attr_schema :enum # A collection of subschemas of which data must validate against exactly # one of to be valid. # # Type: Array[Schema] attr_schema :one_of, :default => [], :schema_name => :oneOf # A subschema which data must not validate against to be valid. # # Type: Schema attr_schema :not # An array of types that data is allowed to be. The spec allows this to be # a string as well, but the parser will always normalize this to an array # of strings. # # Type: Array[String] attr_schema :type, :default => [], :clear_cache => :@type_parsed # validation: array attr_schema :additional_items, :default => true, :schema_name => :additionalItems attr_schema :items attr_schema :max_items, :schema_name => :maxItems attr_schema :min_items, :schema_name => :minItems attr_schema :unique_items, :schema_name => :uniqueItems # validation: number/integer attr_schema :max, :schema_name => :maximum attr_schema :max_exclusive, :default => false, :schema_name => :exclusiveMaximum attr_schema :min, :schema_name => :minimum attr_schema :min_exclusive, :default => false, :schema_name => :exclusiveMinimum attr_schema :multiple_of, :schema_name => :multipleOf # validation: object attr_schema :additional_properties, :default => true, :schema_name => :additionalProperties attr_schema :dependencies, :default => {} attr_schema :max_properties, :schema_name => :maxProperties attr_schema :min_properties, :schema_name => :minProperties attr_schema :pattern_properties, :default => {}, :schema_name => :patternProperties attr_schema :properties, :default => {} attr_schema :required # warning: strictProperties is technically V5 spec (but I needed it now) attr_schema :strict_properties, :default => false, :schema_name => :strictProperties # validation: string attr_schema :format attr_schema :max_length, :schema_name => :maxLength attr_schema :min_length, :schema_name => :minLength attr_schema :pattern # hyperschema attr_schema :links, :default => [] attr_schema :media attr_schema :path_start, :schema_name => :pathStart attr_schema :read_only, :schema_name => :readOnly # hyperschema link attributes attr_schema :enc_type, :schema_name => :encType, :default => "application/json" attr_schema :href attr_schema :media_type, :schema_name => :mediaType, :default => "application/json" attr_schema :method attr_schema :rel attr_schema :schema attr_schema 
:target_schema, :schema_name => :targetSchema # allow booleans to be access with question mark alias :additional_items? :additional_items alias :expanded? :expanded alias :max_exclusive? :max_exclusive alias :min_exclusive? :min_exclusive alias :read_only? :read_only alias :unique_items? :unique_items def expand_references(options = {}) expander = ReferenceExpander.new if expander.expand(self, options) [true, nil] else [false, expander.errors] end end def expand_references!(options = {}) ReferenceExpander.new.expand!(self, options) true end # An array of Ruby classes that are equivalent to the types defined in the # schema. # # Type: Array[Class] def type_parsed @type_parsed ||= type.flat_map { |t| TYPE_MAP[t] }.compact end def inspect "\#" end def inspect_schema if reference str = reference.to_s str += expanded? ? " [EXPANDED]" : " [COLLAPSED]" str += original? ? " [ORIGINAL]" : " [CLONE]" str else hash = {} self.class.copyable_attrs.each do |copyable, _| next if [:@clones, :@data, :@parent, :@uri].include?(copyable) if value = instance_variable_get(copyable) if value.is_a?(Array) if !value.empty? hash[copyable] = value.map { |v| inspect_value(v) } end elsif value.is_a?(Hash) if !value.empty? hash[copyable] = Hash[*value.map { |k, v| [k, inspect_value(v)] }.flatten] end else hash[copyable] = inspect_value(value) end end end hash end end def inspect_value(value) if value.is_a?(Schema) value.inspect_schema else value.inspect end end def original? !clones.include?(self) end def pointer if parent parent.pointer + "/" + fragment else fragment end end def validate(data, fail_fast: false) validator = Validator.new(self) valid = validator.validate(data, fail_fast: fail_fast) [valid, validator.errors] end def validate!(data, fail_fast: false) Validator.new(self).validate!(data, fail_fast: fail_fast) end # Link subobject for a hyperschema. class Link < Schema inherit_attrs end # Media type subobject for a hyperschema. class Media attr_accessor :binary_encoding attr_accessor :type end end end json-schema-0.19.1/lib/json_schema/configuration.rb0000644000175000017500000000103313374107723021626 0ustar boutilboutilmodule JsonSchema class Configuration attr_accessor :all_of_sub_errors attr_reader :custom_formats attr_reader :validate_regex_with def validate_regex_with=(validator) @validate_regex_with = validator end def register_format(name, validator_proc) @custom_formats[name] = validator_proc end # Used for testing. def reset! @validate_regex_with = nil @custom_formats = {} @all_of_sub_errors = false end private def initialize reset! 
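      # Configuration is normally populated through JsonSchema.configure (defined
      # in lib/json_schema.rb). A minimal sketch -- the format name and regex below
      # are purely illustrative, not part of the gem:
      #
      #   JsonSchema.configure do |config|
      #     config.all_of_sub_errors = true
      #     config.register_format("credit-card", ->(data) { data =~ /\A\d{16}\z/ })
      #   end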
end end end json-schema-0.19.1/lib/commands/0000755000175000017500000000000013374107723015745 5ustar boutilboutiljson-schema-0.19.1/lib/commands/validate_schema.rb0000644000175000017500000000621713374107723021411 0ustar boutilboutilrequire "json" require "yaml" require_relative "../json_schema" module Commands class ValidateSchema attr_accessor :detect attr_accessor :fail_fast attr_accessor :extra_schemas attr_accessor :errors attr_accessor :messages def initialize @detect = false @fail_fast = false @extra_schemas = [] @errors = [] @messages = [] end def run(argv) return false if !initialize_store if !detect return false if !(schema_file = argv.shift) return false if !(schema = parse(schema_file)) end # if there are no remaining files in arguments, also a problem return false if argv.count < 1 argv.each do |data_file| if !(data = read_file(data_file)) return false end if detect if !(schema_uri = data["$schema"]) @errors = ["#{data_file}: No $schema tag for detection."] return false end if !(schema = @store.lookup_schema(schema_uri)) @errors = ["#{data_file}: Unknown $schema, try specifying one with -s."] return false end end valid, errors = schema.validate(data, fail_fast: fail_fast) if valid @messages += ["#{data_file} is valid."] else @errors = map_schema_errors(data_file, errors) end end @errors.empty? end private def initialize_store @store = JsonSchema::DocumentStore.new extra_schemas.each do |extra_schema| if !(extra_schema = parse(extra_schema)) return false end @store.add_schema(extra_schema) end true end # Builds a JSON Reference + message like "/path/to/file#/path/to/data". def map_schema_errors(file, errors) errors.map { |m| "#{file}#{m}" } end def parse(file) if !(schema_data = read_file(file)) return nil end parser = JsonSchema::Parser.new if !(schema = parser.parse(schema_data)) @errors = map_schema_errors(file, parser.errors) return nil end expander = JsonSchema::ReferenceExpander.new if !expander.expand(schema, store: @store) @errors = map_schema_errors(file, expander.errors) return nil end schema end def read_file(file) contents = File.read(file) # Perform an empty check because boath YAML and JSON's load will return # `nil` in the case of an empty file, which will otherwise produce # confusing results. if contents.empty? @errors = ["#{file}: File is empty."] nil else if File.extname(file) == ".yaml" YAML.load(contents) else JSON.load(contents) end end rescue Errno::ENOENT @errors = ["#{file}: No such file or directory."] nil rescue JSON::ParserError # Ruby's parsing exceptions aren't too helpful, just point user to # a better tool @errors = ["#{file}: Invalid JSON. Try to validate using `jsonlint`."] nil rescue Psych::SyntaxError @errors = ["#{file}: Invalid YAML."] nil end end end json-schema-0.19.1/lib/json_pointer.rb0000644000175000017500000000021713374107723017202 0ustar boutilboutilrequire_relative "json_pointer/evaluator" module JsonPointer def self.evaluate(data, path) Evaluator.new(data).evaluate(path) end end json-schema-0.19.1/lib/json_reference.rb0000644000175000017500000000225113374107723017460 0ustar boutilboutilrequire "uri" require_relative "json_pointer" module JsonReference def self.reference(ref) Reference.new(ref) end class Reference include Comparable attr_accessor :pointer attr_accessor :uri def initialize(ref) # Note that the #to_s of `nil` is an empty string. @uri = nil # given a simple fragment without '#', resolve as a JSON Pointer only as # per spec if ref.include?("#") uri, @pointer = ref.split('#') if uri && !uri.empty? 
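        # e.g. "/schemata/app#/definitions/name" yields a URI of "/schemata/app"
        # plus a "#/definitions/name" pointer, while a bare "#/definitions/name"
        # skips this branch and leaves @uri as nil.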
@uri = URI.parse(uri) end @pointer ||= "" else @pointer = ref end # normalize pointers by prepending "#" and stripping trailing "/" @pointer = "#" + @pointer @pointer = @pointer.chomp("/") end def <=>(other) to_s <=> other.to_s end def inspect "\#" end # Given the document addressed by #uri, resolves the JSON Pointer part of # the reference. def resolve_pointer(data) JsonPointer.evaluate(data, @pointer) end def to_s if @uri "#{@uri.to_s}#{@pointer}" else @pointer end end end end json-schema-0.19.1/lib/json_schema.rb0000644000175000017500000000127013374107723016762 0ustar boutilboutilrequire_relative "json_schema/attributes" require_relative "json_schema/configuration" require_relative "json_schema/document_store" require_relative "json_schema/error" require_relative "json_schema/parser" require_relative "json_schema/reference_expander" require_relative "json_schema/schema" require_relative "json_schema/validator" module JsonSchema def self.configure yield configuration end def self.configuration @configuration ||= Configuration.new end def self.parse(data) parser = Parser.new if schema = parser.parse(data) [schema, nil] else [nil, parser.errors] end end def self.parse!(data) Parser.new.parse!(data) end end json-schema-0.19.1/test/0000755000175000017500000000000013374107723014355 5ustar boutilboutiljson-schema-0.19.1/test/json_pointer/0000755000175000017500000000000013374107723017066 5ustar boutilboutiljson-schema-0.19.1/test/json_pointer/evaluator_test.rb0000644000175000017500000000430313374107723022454 0ustar boutilboutilrequire "test_helper" require "json_pointer" require "json_schema" describe JsonPointer::Evaluator do before do @evaluator = JsonPointer::Evaluator.new(data) end it "evaluates pointers according to spec" do assert_equal data, @evaluator.evaluate("") assert_equal ["bar", "baz"], @evaluator.evaluate("/foo") assert_equal "bar", @evaluator.evaluate("/foo/0") assert_equal 0, @evaluator.evaluate("/") assert_equal 1, @evaluator.evaluate("/a~1b") assert_equal 2, @evaluator.evaluate("/c%d") assert_equal 3, @evaluator.evaluate("/e^f") assert_equal 4, @evaluator.evaluate("/g|h") assert_equal 5, @evaluator.evaluate("/i\\j") assert_equal 6, @evaluator.evaluate("/k\"l") assert_equal 7, @evaluator.evaluate("/ ") assert_equal 8, @evaluator.evaluate("/m~0n") end it "takes a leading #" do assert_equal 0, @evaluator.evaluate("#/") end it "returns nils on missing values" do assert_nil @evaluator.evaluate("/bar") end it "raises when a path doesn't being with /" do e = assert_raises(ArgumentError) { @evaluator.evaluate("foo") } assert_equal %{Path must begin with a leading "/": foo.}, e.message e = assert_raises(ArgumentError) { @evaluator.evaluate("#foo") } assert_equal %{Path must begin with a leading "/": #foo.}, e.message end it "raises when a non-digit is specified on an array" do e = assert_raises(ArgumentError) { @evaluator.evaluate("/foo/bar") } assert_equal %{Key operating on an array must be a digit or "-": bar.}, e.message end it "can evaluate on a schema object" do schema = JsonSchema.parse!(DataScaffold.schema_sample) evaluator = JsonPointer::Evaluator.new(schema) res = evaluator.evaluate("#/definitions/app/definitions/contrived/allOf/0") assert_kind_of JsonSchema::Schema, res assert 30, res.max_length end def data { "foo" => ["bar", "baz"], "" => 0, "a/b" => 1, "c%d" => 2, "e^f" => 3, "g|h" => 4, "i\\j" => 5, "k\"l" => 6, " " => 7, "m~n" => 8 } end end json-schema-0.19.1/test/bin_test.rb0000644000175000017500000000111013374107723016502 0ustar boutilboutilrequire "test_helper" # # 
The purpose of this sets of tests is just to include our Ruby executables # where possible so that we can get very basic sanity checks on their syntax # (which is something that of course Ruby can't do by default). # # We can do this without actually executing them because they're gated by `if # $0 == __FILE__` statements. # describe "executables in bin/" do before do @bin_dir = File.expand_path("../../bin", __FILE__) end it "has roughly valid Ruby structure for validate-schema" do load File.join(@bin_dir, "validate-schema") end end json-schema-0.19.1/test/json_schema/0000755000175000017500000000000013374107723016646 5ustar boutilboutiljson-schema-0.19.1/test/json_schema/error_test.rb0000644000175000017500000000067513374107723021373 0ustar boutilboutilrequire "test_helper" require "json_schema" describe JsonSchema::SchemaError do it "can print a message with a pointer" do schema = JsonSchema::Schema.new schema.fragment = "#" e = JsonSchema::SchemaError.new(schema, "problem", nil) assert_equal "#: problem", e.to_s end it "can print a message without a pointer" do e = JsonSchema::SchemaError.new(nil, "problem", nil) assert_equal "problem", e.to_s end end json-schema-0.19.1/test/json_schema/document_store_test.rb0000644000175000017500000000206613374107723023270 0ustar boutilboutilrequire "test_helper" require "json_schema" describe JsonSchema::DocumentStore do before do @store = JsonSchema::DocumentStore.new end it "adds and looks up a schema" do schema = schema_sample("http://example.com/schema") @store.add_schema(schema) assert_equal schema, @store.lookup_schema(schema.uri) end it "can iterate through its schemas" do uri = "http://example.com/schema" schema = schema_sample(uri) @store.add_schema(schema) assert_equal [[uri, schema]], @store.to_a end it "can lookup a schema added with a document root sign" do uri = "http://example.com/schema" schema = schema_sample(uri + "#") @store.add_schema(schema) assert_equal schema, @store.lookup_schema(uri) end it "can lookup a schema with a document root sign" do uri = "http://example.com/schema" schema = schema_sample(uri) @store.add_schema(schema) assert_equal schema, @store.lookup_schema(uri + "#") end def schema_sample(uri) schema = JsonSchema::Schema.new schema.uri = uri schema end end json-schema-0.19.1/test/json_schema/schema_test.rb0000644000175000017500000000242013374107723021470 0ustar boutilboutilrequire "test_helper" require "json_schema" describe JsonSchema::Schema do it "allows schema attribute access with #[]" do schema = JsonSchema::Schema.new schema.properties = { "foo" => nil } assert_equal({ "foo" => nil }, schema[:properties]) end it "allows schema attribute access with #[] and overridden name" do schema = JsonSchema::Schema.new schema.additional_properties = { "foo" => nil } assert_equal({ "foo" => nil }, schema[:additionalProperties]) end it "allows schema attribute access with #[] as string" do schema = JsonSchema::Schema.new schema.properties = { "foo" => nil } assert_equal({ "foo" => nil }, schema["properties"]) end it "raises if attempting to access #[] with bad method" do schema = JsonSchema::Schema.new assert_raises NoMethodError do schema[:wat] end end it "raises if attempting to access #[] with non-schema attribute" do schema = JsonSchema::Schema.new assert_raises NoMethodError do schema[:expanded] end end it "updates type_parsed when type is changed" do schema = JsonSchema::Schema.new schema.type = ["integer"] assert_equal [Integer], schema.type_parsed schema.type = ["string"] assert_equal [String], schema.type_parsed 
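    # This works because attr_schema :type declares :clear_cache => :@type_parsed
    # (see schema.rb), so assigning a new type wipes the memoized value before the
    # next #type_parsed call.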
end end json-schema-0.19.1/test/json_schema/attribute_test.rb0000644000175000017500000000554213374107723022243 0ustar boutilboutilrequire "test_helper" require "json_schema" describe JsonSchema::Attributes do it "defines copyable attributes" do obj = TestAttributes.new obj.copyable = "foo" assert_equal "foo", obj.copyable assert_includes obj.class.copyable_attrs, :@copyable end it "defines schema attributes" do obj = TestAttributes.new obj.schema = "foo" assert_equal "foo", obj.schema assert_equal({:schema => :schema, :named => :schema_named, :cached => :cached}, obj.class.schema_attrs) end it "defines attributes with default readers" do obj = TestAttributes.new assert_equal [], obj.copyable_default assert_equal "application/json", obj.copyable_default_with_string hash = obj.copyable_default_with_object assert_equal({}, hash) hash[:x] = 123 # This is a check to make sure that the new object is not the same object # as the one that we just mutated above. When assigning defaults the module # should dup any common data strcutures that it puts in here. obj = TestAttributes.new hash = obj.copyable_default_with_object assert_equal({}, hash) end it "inherits attributes when so instructed" do obj = TestAttributesDescendant.new assert_includes obj.class.copyable_attrs, :@copyable end it "allows schema attributes to be indexed but not others" do obj = TestAttributes.new obj.copyable = "non-schema" obj.schema = "schema" assert_raises NoMethodError do assert_nil obj[:copyable] end assert_equal "schema", obj[:schema] end it "copies attributes with #copy_from" do obj = TestAttributes.new obj.copyable = "copyable" obj.schema = "schema" obj2 = TestAttributes.new obj2.copy_from(obj) assert_equal "copyable", obj2.copyable assert_equal "schema", obj2.schema end it "initializes attributes with #initialize_attrs" do obj = TestAttributes.new # should produce a nil value *without* a Ruby warning assert_nil obj.copyable assert_nil obj.schema end it "cleans cached values when assigning parent attribute" do obj = TestAttributes.new obj.cached = "test" assert_equal "test_123", obj.cached_parsed obj.cached = "other" assert_equal "other_123", obj.cached_parsed end class TestAttributes include JsonSchema::Attributes def initialize initialize_attrs end attr_copyable :copyable attr_schema :schema attr_schema :schema_named, :schema_name => :named attr_schema :cached, :clear_cache => :@cached_parsed def cached_parsed @cached_parsed ||= "#{cached}_123" end attr_copyable :copyable_default, :default => [] attr_copyable :copyable_default_with_string, :default => "application/json" attr_copyable :copyable_default_with_object, :default => {} end class TestAttributesDescendant < TestAttributes inherit_attrs end end json-schema-0.19.1/test/json_schema/parser_test.rb0000644000175000017500000002626313374107723021537 0ustar boutilboutilrequire "test_helper" require "json_schema" describe JsonSchema::Parser do after do JsonSchema.configuration.reset! 
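    # Several examples below register custom formats or set validate_regex_with,
    # so the global configuration is reset after each one to avoid leaking state.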
end it "parses the basic attributes of a schema" do schema = parse assert_nil schema.id assert_equal "Example API", schema.title assert_equal "An example API.", schema.description assert_equal ["object"], schema.type assert_equal "/", schema.uri end it "parses subschemas" do schema = parse.definitions["app"] assert_nil schema.reference assert_equal "App", schema.title assert_equal "An app.", schema.description assert_equal "schemata/app", schema.id assert_equal ["object"], schema.type assert_equal "/schemata/app", schema.uri refute_nil schema.parent end it "parses sub-subschemas" do schema = parse.definitions["app"].definitions["name"] assert_nil schema.reference assert_equal "hello-world", schema.default assert_equal "unique name of app", schema.description assert_equal ["string"], schema.type assert_equal "/schemata/app", schema.uri refute_nil schema.parent end it "parses references" do schema = parse.properties["app"] refute_nil schema.reference assert_nil schema.reference.uri assert_equal "#/definitions/app", schema.reference.pointer refute_nil schema.parent end it "parses enum validation" do schema = parse.definitions["app"].definitions["visibility"] assert_equal ["private", "public"], schema.enum end it "parses array validations" do schema = parse.definitions["app"].definitions["flags"] assert_equal(/^[a-z][a-z\-]*[a-z]$/, schema.items.pattern) assert_equal 1, schema.min_items assert_equal 10, schema.max_items assert_equal true, schema.unique_items end it "parses array items tuple validation" do pointer("#/definitions/app/definitions/flags").merge!( "items" => [ { "enum" => ["bamboo", "cedar"] }, { "enum" => ["http", "https"] } ] ) schema = parse.definitions["app"].definitions["flags"] assert_equal ["bamboo", "cedar"], schema.items[0].enum assert_equal ["http", "https"], schema.items[1].enum end it "parses array additionalItems object validation as boolean" do pointer("#/definitions/app/definitions/flags").merge!( "additionalItems" => false ) schema = parse.definitions["app"].definitions["flags"] assert_equal false, schema.additional_items end it "parses array additionalItems object validation as schema" do pointer("#/definitions/app/definitions/flags").merge!( "additionalItems" => { "type" => "boolean" } ) schema = parse.definitions["app"].definitions["flags"].additional_items assert_equal ["boolean"], schema.type end it "parses integer validations" do schema = parse.definitions["app"].definitions["id"] assert_equal 0, schema.min assert_equal true, schema.min_exclusive assert_equal 10000, schema.max assert_equal false, schema.max_exclusive assert_equal 1, schema.multiple_of end it "parses number validations" do schema = parse.definitions["app"].definitions["cost"] assert_equal 0.0, schema.min assert_equal false, schema.min_exclusive assert_equal 1000.0, schema.max assert_equal true, schema.max_exclusive assert_equal 0.01, schema.multiple_of end it "parses the basic set of object validations" do schema = parse.definitions["app"] assert_equal 10, schema.max_properties assert_equal 1, schema.min_properties assert_equal ["name"], schema.required end it "parses the additionalProperties object validation as boolean" do pointer("#/definitions/app").merge!( "additionalProperties" => false ) schema = parse.definitions["app"] assert_equal false, schema.additional_properties end it "parses the additionalProperties object validation as schema" do pointer("#/definitions/app").merge!( "additionalProperties" => { "type" => "boolean" } ) schema = parse.definitions["app"].additional_properties 
assert_equal ["boolean"], schema.type end it "parses the dependencies object validation" do schema = parse.definitions["app"] assert_equal ["ssl"], schema.dependencies["production"] assert_equal 20.0, schema.dependencies["ssl"].properties["cost"].min end it "parses the patternProperties object validation" do schema = parse.definitions["app"].definitions["config_vars"] property = schema.pattern_properties.first assert_equal(/^\w+$/, property[0]) assert_equal ["null", "string"], property[1].type end it "parses the strictProperties object validation" do pointer("#/definitions/app").merge!( "strictProperties" => true ) schema = parse.definitions["app"] assert_equal true, schema.strict_properties end # couldn't think of any non-contrived examples to work with here it "parses the basic set of schema validations" do schema = parse.definitions["app"].definitions["contrived"] assert_equal 2, schema.all_of.count assert_equal 2, schema.one_of.count assert schema.not end it "parses the anyOf schema validation" do schema = parse.definitions["app"].definitions["identity"] assert_equal 2, schema.any_of.count assert_equal "/schemata/app#/definitions/id", schema.any_of[0].reference.to_s assert_equal "/schemata/app#/definitions/name", schema.any_of[1].reference.to_s end it "parses basic set of string validations" do schema = parse.definitions["app"].definitions["name"] assert_equal 30, schema.max_length assert_equal 3, schema.min_length assert_equal(/^[a-z][a-z0-9-]{3,30}$/, schema.pattern) end it "parses hypermedia links" do pointer("#/definitions/app").merge!( "links" => [ "description" => "Create a new app.", "encType" => "application/x-www-form-urlencoded", "href" => "/apps", "method" => "POST", "rel" => "create", "mediaType" => "application/json", "schema" => { "properties" => { "name" => { "$ref" => "#/definitions/app/definitions/name" }, } }, "targetSchema" => { "$ref" => "#/definitions/app" } ] ) schema = parse.definitions["app"] link = schema.links[0] assert_equal schema, link.parent assert_equal "links/0", link.fragment assert_equal "#/definitions/app/links/0", link.pointer assert_equal "Create a new app.", link.description assert_equal "application/x-www-form-urlencoded", link.enc_type assert_equal "/apps", link.href assert_equal :post, link.method assert_equal "create", link.rel assert_equal "application/json", link.media_type assert_equal "#/definitions/app/definitions/name", link.schema.properties["name"].reference.pointer end it "parses hypermedia media" do pointer("#/definitions/app/media").merge!( "binaryEncoding" => "base64", "type" => "image/png" ) schema = parse.definitions["app"] assert_equal "base64", schema.media.binary_encoding assert_equal "image/png", schema.media.type end it "parses hypermedia pathStart" do pointer("#/definitions/app").merge!( "pathStart" => "/v2" ) schema = parse.definitions["app"] assert_equal "/v2", schema.path_start end it "parses hypermedia readOnly" do pointer("#/definitions/app").merge!( "readOnly" => true ) schema = parse.definitions["app"] assert_equal true, schema.read_only end it "builds appropriate JSON Pointers" do schema = parse.definitions["app"].definitions["name"] assert_equal "#/definitions/app/definitions/name", schema.pointer end it "errors on non-string ids" do schema_sample["id"] = 4 refute parse assert_includes error_messages, %{4 is not a valid "id", must be a string.} assert_includes error_types, :invalid_type end it "errors on non-string titles" do schema_sample["title"] = 4 refute parse assert_includes error_messages, %{4 is not a valid 
"title", must be a string.} assert_includes error_types, :invalid_type end it "errors on non-string descriptions" do schema_sample["description"] = 4 refute parse assert_includes error_messages, %{4 is not a valid "description", must be a string.} assert_includes error_types, :invalid_type end it "errors on non-array and non-string types" do schema_sample["type"] = 4 refute parse assert_includes error_messages, %{4 is not a valid "type", must be a array/string.} assert_includes error_types, :invalid_type end it "errors on unknown types" do schema_sample["type"] = ["float", "double"] refute parse assert_includes error_messages, %{Unknown types: double, float.} assert_includes error_types, :unknown_type end it "errors on unknown formats" do schema_sample["format"] = "obscure-thing" refute parse assert_includes error_messages, '"obscure-thing" is not a valid format, ' \ 'must be one of date, date-time, email, ' \ 'hostname, ipv4, ipv6, regex, uri, ' \ 'uri-reference, uuid.' assert_includes error_types, :unknown_format end it "passes for an invalid regex when not asked to check" do schema_sample["pattern"] = "\\Ameow" assert parse end it "errors for an invalid regex when asked to check" do require 'ecma-re-validator' JsonSchema.configure do |c| c.validate_regex_with = :'ecma-re-validator' end schema_sample["pattern"] = "\\Ameow" refute parse assert_includes error_messages, '"\\\\Ameow" is not an ECMA-262 regular expression.' assert_includes error_types, :regex_failed end it "parses custom formats" do JsonSchema.configure do |c| c.register_format 'the-answer', ->(data) { data.to_i == 42 } end schema_sample["format"] = "the-answer" assert parse end it "rejects bad formats even when there are custom formats defined" do JsonSchema.configure do |c| c.register_format "the-answer", ->(data) { data.to_i == 42 } end schema_sample["format"] = "not-a-format" refute parse assert_includes error_messages, '"not-a-format" is not a valid format, ' \ 'must be one of date, date-time, email, ' \ 'hostname, ipv4, ipv6, regex, uri, ' \ 'uri-reference, uuid, the-answer.' assert_includes error_types, :unknown_format end it "raises an aggregate error with parse!" 
do schema_sample["id"] = 4 parser = JsonSchema::Parser.new # don't bother checking the particulars of the error here because we have # other tests for that above assert_raises JsonSchema::AggregateError do parser.parse!(schema_sample) end end def error_messages @parser.errors.map { |e| e.message } end def error_types @parser.errors.map { |e| e.type } end def parse @parser = JsonSchema::Parser.new @parser.parse(schema_sample) end def pointer(path) JsonPointer.evaluate(schema_sample, path) end def schema_sample @schema_sample ||= DataScaffold.schema_sample end end json-schema-0.19.1/test/json_schema/reference_expander_test.rb0000644000175000017500000002260613374107723024064 0ustar boutilboutilrequire "test_helper" require "json_schema" describe JsonSchema::ReferenceExpander do it "expands references" do expand assert_equal [], error_messages # this was always a fully-defined property referenced = @schema.definitions["app"] # this used to be a $ref reference = @schema.properties["app"] assert_equal "#/definitions/app", reference.reference.pointer assert_equal referenced.description, reference.description assert_equal referenced.id, reference.id assert_equal referenced.type, reference.type assert_equal referenced.uri, reference.uri end it "takes a document store" do store = JsonSchema::DocumentStore.new expand(store: store) assert_equal store, @expander.store end it "will expand anyOf" do expand assert_equal [], error_messages schema = @schema.properties["app"].definitions["contrived_plus"] assert_equal 3, schema.any_of[0].min_length assert_equal 5, schema.any_of[1].min_length end it "will expand allOf" do expand assert_equal [], error_messages schema = @schema.properties["app"].definitions["contrived_plus"] assert_equal 30, schema.all_of[0].max_length assert_equal 3, schema.all_of[1].min_length end it "will expand dependencies" do expand assert_equal [], error_messages schema = @schema.properties["app"].dependencies["ssl"].properties["name"] assert_equal ["string"], schema.type end it "will expand items list schema" do pointer("#/definitions/app/definitions/flags").merge!( "items" => { "$ref" => "#/definitions/app/definitions/name" } ) expand assert_equal [], error_messages schema = @schema.properties["app"].properties["flags"].items assert_equal ["string"], schema.type end it "will expand items tuple schema" do pointer("#/definitions/app/definitions/flags").merge!( "items" => [ { "$ref" => "#/definitions/app/definitions/name" }, { "$ref" => "#/definitions/app/definitions/owner" } ] ) expand assert_equal [], error_messages schema0 = @schema.properties["app"].properties["flags"].items[0] schema1 = @schema.properties["app"].properties["flags"].items[0] assert_equal ["string"], schema0.type assert_equal ["string"], schema1.type end it "will expand oneOf" do expand assert_equal [], error_messages schema = @schema.properties["app"].definitions["contrived_plus"] assert_equal(/^(foo|aaa)$/, schema.one_of[0].pattern) assert_equal(/^(foo|zzz)$/, schema.one_of[1].pattern) end it "will expand not" do expand assert_equal [], error_messages schema = @schema.properties["app"].definitions["contrived_plus"] assert_equal(/^$/, schema.not.pattern) end it "will expand additionalProperties" do pointer("#").merge!( "additionalProperties" => { "$ref" => "#" } ) expand assert_equal [], error_messages schema = @schema.additional_properties assert_equal ["object"], schema.type end it "will expand patternProperties" do expand assert_equal [], error_messages # value ([1]) of the #first tuple in hash schema = 
@schema.properties["app"].definitions["roles"]. pattern_properties.first[1] assert_equal ["string"], schema.type end it "will expand hyperschema link schemas" do expand assert_equal [], error_messages schema = @schema.properties["app"].links[0].schema.properties["name"] assert_equal ["string"], schema.type end it "will expand hyperschema link targetSchemas" do expand assert_equal [], error_messages schema = @schema.properties["app"].links[0].target_schema.properties["name"] assert_equal ["string"], schema.type end it "will perform multiple passes to resolve all references" do pointer("#/properties").merge!( "app0" => { "$ref" => "#/properties/app1" }, "app1" => { "$ref" => "#/properties/app2" }, "app2" => { "$ref" => "#/definitions/app" }, ) expand assert_equal [], error_messages schema = @schema.properties["app0"] assert_equal ["object"], schema.type end it "will resolve circular dependencies" do pointer("#/properties").merge!( "app" => { "$ref" => "#" } ) expand assert_equal [], error_messages schema = @schema.properties["app"] assert_equal ["object"], schema.type end it "builds appropriate JSON Pointers for expanded references" do expand assert_equal [], error_messages # the *referenced* schema should still have a proper pointer schema = @schema.definitions["app"].definitions["name"] assert_equal "#/definitions/app/definitions/name", schema.pointer # the *reference* schema should have expanded a pointer schema = @schema.properties["app"].properties["name"] assert_equal "#/definitions/app/properties/name", schema.pointer end # clones are special in that they retain their original pointer despite where # they've been nested it "builds appropriate JSON Pointers for circular dependencies" do pointer("#/properties").merge!( "app" => { "$ref" => "#" }, "app1" => { "$ref" => "#/properties/app"} ) expand # the first self reference has the standard pointer as expected schema = @schema.properties["app"] assert_equal "#/properties/app", schema.pointer # but diving deeper results in the same pointer again schema = schema.properties["app"] assert_equal "#/properties/app", schema.pointer schema = @schema.properties["app1"] assert_equal "#/properties/app1", schema.pointer schema = schema.properties["app1"] assert_equal "#/properties/app1", schema.pointer end it "errors on a JSON Pointer that can't be resolved" do pointer("#/properties").merge!( "app" => { "$ref" => "#/definitions/nope" } ) refute expand assert_includes error_messages, %{Couldn't resolve pointer "#/definitions/nope".} assert_includes error_types, :unresolved_pointer assert_includes error_messages, %{Couldn't resolve references: #/definitions/nope.} assert_includes error_types, :unresolved_references end it "errors on a URI that can't be resolved" do pointer("#/properties").merge!( "app" => { "$ref" => "/schemata/user#/definitions/name" } ) refute expand assert_includes error_messages, %{Couldn't resolve references: /schemata/user#/definitions/name.} assert_includes error_types, :unresolved_references assert_includes error_messages, %{Couldn't resolve URI: /schemata/user.} assert_includes error_types, :unresolved_pointer end it "errors on a relative URI that cannot be transformed to an absolute" do pointer("#/properties").merge!( "app" => { "$ref" => "relative#definitions/name" } ) refute expand assert_includes error_messages, %{Couldn't resolve references: relative#definitions/name.} assert_includes error_types, :unresolved_references end it "errors on a reference cycle" do pointer("#/properties").merge!( "app0" => { "$ref" => 
"#/properties/app2" }, "app1" => { "$ref" => "#/properties/app0" }, "app2" => { "$ref" => "#/properties/app1" }, ) refute expand properties = "#/properties/app0, #/properties/app1, #/properties/app2" assert_includes error_messages, %{Reference loop detected: #{properties}.} assert_includes error_types, :loop_detected assert_includes error_messages, %{Couldn't resolve references: #{properties}.} assert_includes error_types, :unresolved_references end it "raises an aggregate error with expand!" do pointer("#/properties").merge!( "app" => { "$ref" => "#/definitions/nope" } ) schema = JsonSchema::Parser.new.parse!(schema_sample) expander = JsonSchema::ReferenceExpander.new # don't bother checking the particulars of the error here because we have # other tests for that above assert_raises JsonSchema::AggregateError do expander.expand!(schema) end end it "expands a schema that is just a reference" do # First initialize another schema. Give it a fully qualified URI so that we # can reference it across schemas. schema = JsonSchema::Parser.new.parse!(schema_sample) schema.uri = "http://json-schema.org/test" # Initialize a store and add our schema to it. store = JsonSchema::DocumentStore.new store.add_schema(schema) # Have the parser parse _just_ a reference. It should resolve to a # subschema in the schema that we initialized above. schema = JsonSchema::Parser.new.parse!( { "$ref" => "http://json-schema.org/test#/definitions/app" } ) expander = JsonSchema::ReferenceExpander.new expander.expand!(schema, store: store) assert schema.expanded? end it "expands a reference to a link" do pointer("#/properties").merge!( "link" => { "$ref" => "#/links/0" } ) assert expand referenced = @schema.links[0] reference = @schema.properties["link"] assert_equal reference.href, referenced.href end def error_messages @expander.errors.map { |e| e.message } end def error_types @expander.errors.map { |e| e.type } end def pointer(path) JsonPointer.evaluate(schema_sample, path) end def schema_sample @schema_sample ||= DataScaffold.schema_sample end def expand(options = {}) @schema = JsonSchema::Parser.new.parse!(schema_sample) @expander = JsonSchema::ReferenceExpander.new @expander.expand(@schema, options) end end json-schema-0.19.1/test/json_schema/validator_test.rb0000644000175000017500000010203713374107723022222 0ustar boutilboutilrequire "test_helper" require "json_schema" describe JsonSchema::Validator do after do JsonSchema.configuration.reset! 
end it "can find data valid" do assert_valid end it "validates enum successfully" do pointer("#/definitions/app/definitions/visibility").merge!( "enum" => ["private", "public"] ) data_sample["visibility"] = "public" assert_valid end it "validates enum unsuccessfully" do pointer("#/definitions/app/definitions/visibility").merge!( "enum" => ["private", "public"] ) data_sample["visibility"] = "personal" refute_valid assert_includes error_messages, %{personal is not a member of ["private", "public"].} assert_includes error_types, :invalid_type end it "validates type successfully" do pointer("#/definitions/app").merge!( "type" => ["object"] ) @data_sample = { "name" => "cloudnasium" } assert_valid end it "validates sub-type successfully" do pointer("#/definitions/app").merge!( "type" => ["object"] ) class SomeClass < Hash; end @data_sample = SomeClass.new @data_sample["name"] = "yayrails" assert_valid end it "validates type unsuccessfully" do pointer("#/definitions/app").merge!( "type" => ["object"] ) @data_sample = 4 refute_valid assert_includes error_messages, %{For 'definitions/app', 4 is not an object.} assert_includes error_types, :invalid_type assert_includes error_data, 4 end it "provides accurate error messages for multiple type errors" do pointer("#/definitions/app").merge!( "type" => ["string"] ) @data_sample = 4 refute_valid assert_includes error_messages, %{For 'definitions/app', 4 is not a string.} assert_includes error_types, :invalid_type pointer("#/definitions/app").merge!( "type" => ["string", "null"] ) @data_sample = 4 refute_valid assert_includes error_messages, %{For 'definitions/app', 4 is not a string or null.} assert_includes error_types, :invalid_type pointer("#/definitions/app").merge!( "type" => ["object", "null", "string"] ) @data_sample = 4 refute_valid assert_includes error_messages, %{For 'definitions/app', 4 is not an object, null, or string.} assert_includes error_types, :invalid_type end it "validates items with list successfully" do pointer("#/definitions/app/definitions/flags").merge!( "items" => { "pattern" => "^[a-z][a-z\\-]*[a-z]$" } ) data_sample["flags"] = ["websockets"] assert_valid end it "validates items with list unsuccessfully" do pointer("#/definitions/app/definitions/flags").merge!( "items" => { "pattern" => "^[a-z][a-z\\-]*[a-z]$" } ) data_sample["flags"] = ["1337"] refute_valid assert_includes error_messages, %{1337 does not match /^[a-z][a-z\\-]*[a-z]$/.} assert_includes error_types, :pattern_failed assert_includes error_data, "1337" end it "validates items with tuple successfully" do pointer("#/definitions/app/definitions/flags").merge!( "items" => [ { "enum" => ["bamboo", "cedar"] }, { "enum" => ["http", "https"] } ] ) data_sample["flags"] = ["cedar", "https"] assert_valid end it "validates items with tuple with additionalItems boolean successfully" do pointer("#/definitions/app/definitions/flags").merge!( "additionalItems" => true, "items" => [ { "enum" => ["bamboo", "cedar"] }, { "enum" => ["http", "https"] } ] ) data_sample["flags"] = ["cedar", "https", "websockets"] assert_valid end it "validates items with tuple with additionalItems boolean unsuccessfully" do pointer("#/definitions/app/definitions/flags").merge!( "additionalItems" => false, "items" => [ { "enum" => ["bamboo", "cedar"] }, { "enum" => ["http", "https"] } ] ) data_sample["flags"] = ["cedar", "https", "websockets"] refute_valid assert_includes error_messages, %{No more than 2 items are allowed; 3 were supplied.} assert_includes error_types, :max_items_failed assert_includes 
error_data, ["cedar", "https", "websockets"] end it "validates items with tuple with additionalItems schema successfully" do pointer("#/definitions/app/definitions/flags").merge!( "additionalItems" => { "enum" => [ "foo", "websockets" ] }, "items" => [ { "enum" => ["bamboo", "cedar"] }, { "enum" => ["http", "https"] } ] ) data_sample["flags"] = ["cedar", "https", "websockets"] assert_valid end it "validates items with tuple with additionalItems schema unsuccessfully for non-conforming additional item" do pointer("#/definitions/app/definitions/flags").merge!( "additionalItems" => { "enum" => [ "foo", "bar" ] }, "items" => [ { "enum" => ["bamboo", "cedar"] }, { "enum" => ["http", "https"] } ] ) data_sample["flags"] = ["cedar", "https", "websockets"] refute_valid assert_includes error_messages, %{websockets is not a member of ["foo", "bar"].} assert_includes error_types, :invalid_type assert_includes error_data, "websockets" end it "validates items with tuple with additionalItems schema unsuccessfully with multiple failures" do pointer("#/definitions/app/definitions/flags").merge!( "additionalItems" => { "enum" => [ "foo", "bar" ] }, "items" => [ { "enum" => ["bamboo", "cedar"] }, { "enum" => ["http", "https"] } ] ) data_sample["flags"] = ["cedar", "https", "websockets", "1337"] refute_valid assert_includes error_messages, %{websockets is not a member of ["foo", "bar"].} assert_includes error_types, :invalid_type assert_includes error_data, "websockets" assert_includes error_messages, %{1337 is not a member of ["foo", "bar"].} assert_includes error_types, :invalid_type assert_includes error_data, "1337" end it "validates items with tuple with additionalItems schema unsuccessfully with non-conforming items and additional items" do pointer("#/definitions/app/definitions/flags").merge!( "additionalItems" => { "enum" => [ "foo", "bar" ] }, "items" => [ { "enum" => ["bamboo", "cedar"] }, { "enum" => ["http", "https"] } ] ) data_sample["flags"] = ["cedar", "1337", "websockets"] refute_valid assert_includes error_messages, %{websockets is not a member of ["foo", "bar"].} assert_includes error_types, :invalid_type assert_includes error_data, "websockets" assert_includes error_messages, %{1337 is not a member of ["http", "https"].} assert_includes error_types, :invalid_type assert_includes error_data, "1337" end it "validates items with tuple unsuccessfully for not enough items" do pointer("#/definitions/app/definitions/flags").merge!( "items" => [ { "enum" => ["bamboo", "cedar"] }, { "enum" => ["http", "https"] } ] ) data_sample["flags"] = ["cedar"] refute_valid assert_includes error_messages, %{2 items required; only 1 was supplied.} assert_includes error_types, :min_items_failed assert_includes error_data, ["cedar"] end it "validates items with tuple unsuccessfully for too many items" do pointer("#/definitions/app/definitions/flags").merge!( "additionalItems" => false, "items" => [ { "enum" => ["bamboo", "cedar"] }, { "enum" => ["http", "https"] } ] ) data_sample["flags"] = ["cedar", "https", "websockets"] refute_valid assert_includes error_messages, %{No more than 2 items are allowed; 3 were supplied.} assert_includes error_types, :max_items_failed assert_includes error_data, ["cedar", "https", "websockets"] end it "validates items with tuple unsuccessfully for non-conforming items" do pointer("#/definitions/app/definitions/flags").merge!( "additionalItems" => false, "items" => [ { "enum" => ["bamboo", "cedar"] }, { "enum" => ["http", "https"] } ] ) data_sample["flags"] = ["cedar", "1337"] 
refute_valid assert_includes error_messages, %{1337 is not a member of ["http", "https"].} assert_includes error_types, :invalid_type assert_includes error_data, "1337" end it "validates maxItems successfully" do pointer("#/definitions/app/definitions/flags").merge!( "maxItems" => 10 ) data_sample["flags"] = (0...10).to_a assert_valid end it "validates maxItems unsuccessfully" do pointer("#/definitions/app/definitions/flags").merge!( "maxItems" => 10 ) data_sample["flags"] = (0...11).to_a refute_valid assert_includes error_messages, %{No more than 10 items are allowed; 11 were supplied.} assert_includes error_types, :max_items_failed assert_includes error_data, (0...11).to_a end it "validates minItems successfully" do pointer("#/definitions/app/definitions/flags").merge!( "minItems" => 1 ) data_sample["flags"] = ["websockets"] assert_valid end it "validates minItems unsuccessfully" do pointer("#/definitions/app/definitions/flags").merge!( "minItems" => 1 ) data_sample["flags"] = [] refute_valid assert_includes error_messages, %{1 item required; only 0 were supplied.} assert_includes error_types, :min_items_failed assert_includes error_data, [] end it "validates uniqueItems successfully" do pointer("#/definitions/app/definitions/flags").merge!( "uniqueItems" => true ) data_sample["flags"] = ["websockets"] assert_valid end it "validates uniqueItems unsuccessfully" do pointer("#/definitions/app/definitions/flags").merge!( "uniqueItems" => true ) data_sample["flags"] = ["websockets", "websockets"] refute_valid assert_includes error_messages, %{Duplicate items are not allowed.} assert_includes error_types, :unique_items_failed assert_includes error_data, ["websockets", "websockets"] end it "validates maximum for an integer with exclusiveMaximum false" do pointer("#/definitions/app/definitions/id").merge!( "exclusiveMaximum" => false, "maximum" => 10 ) data_sample["id"] = 11 refute_valid assert_includes error_messages, %{11 must be less than or equal to 10.} assert_includes error_types, :max_failed assert_includes error_data, 11 end it "validates maximum for an integer with exclusiveMaximum true" do pointer("#/definitions/app/definitions/id").merge!( "exclusiveMaximum" => true, "maximum" => 10 ) data_sample["id"] = 10 refute_valid assert_includes error_messages, %{10 must be less than 10.} assert_includes error_types, :max_failed end it "validates maximum for a number with exclusiveMaximum false" do pointer("#/definitions/app/definitions/cost").merge!( "exclusiveMaximum" => false, "maximum" => 10.0 ) data_sample["cost"] = 10.1 refute_valid assert_includes error_messages, %{10.1 must be less than or equal to 10.0.} assert_includes error_types, :max_failed end it "validates maximum for a number with exclusiveMaximum true" do pointer("#/definitions/app/definitions/cost").merge!( "exclusiveMaximum" => true, "maximum" => 10.0 ) data_sample["cost"] = 10.0 refute_valid assert_includes error_messages, %{10.0 must be less than 10.0.} assert_includes error_types, :max_failed end it "validates minimum for an integer with exclusiveMaximum false" do pointer("#/definitions/app/definitions/id").merge!( "exclusiveMinimum" => false, "minimum" => 1 ) data_sample["id"] = 0 refute_valid assert_includes error_messages, %{0 must be greater than or equal to 1.} assert_includes error_types, :min_failed assert_includes error_data, 0 end it "validates minimum for an integer with exclusiveMaximum true" do pointer("#/definitions/app/definitions/id").merge!( "exclusiveMinimum" => true, "minimum" => 1 ) data_sample["id"] = 1 
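    # With "exclusiveMinimum" => true the boundary value is itself invalid, so an
    # id of 1 fails against a minimum of 1.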
refute_valid assert_includes error_messages, %{1 must be greater than 1.} end it "validates minimum for a number with exclusiveMaximum false" do pointer("#/definitions/app/definitions/cost").merge!( "exclusiveMinimum" => false, "minimum" => 0.0 ) data_sample["cost"] = -0.01 refute_valid assert_includes error_messages, %{-0.01 must be greater than or equal to 0.0.} assert_includes error_types, :min_failed end it "validates minimum for a number with exclusiveMaximum true" do pointer("#/definitions/app/definitions/cost").merge!( "exclusiveMinimum" => true, "minimum" => 0.0 ) data_sample["cost"] = 0.0 refute_valid assert_includes error_messages, %{0.0 must be greater than 0.0.} assert_includes error_types, :min_failed end it "validates multipleOf for an integer" do pointer("#/definitions/app/definitions/id").merge!( "multipleOf" => 2 ) data_sample["id"] = 1 refute_valid assert_includes error_messages, %{1 is not a multiple of 2.} assert_includes error_types, :multiple_of_failed assert_includes error_data, 1 end it "validates multipleOf for a number" do pointer("#/definitions/app/definitions/cost").merge!( "multipleOf" => 0.01 ) data_sample["cost"] = 0.005 refute_valid assert_includes error_messages, %{0.005 is not a multiple of 0.01.} assert_includes error_types, :multiple_of_failed end it "validates additionalProperties boolean successfully" do pointer("#/definitions/app").merge!( "additionalProperties" => true ) data_sample["foo"] = "bar" assert_valid end it "validates additionalProperties boolean unsuccessfully" do pointer("#/definitions/app").merge!( "additionalProperties" => false, "patternProperties" => { "^matches" => {} } ) data_sample["foo"] = "bar" data_sample["matches_pattern"] = "yes!" refute_valid assert_includes error_messages, %{"foo" is not a permitted key.} assert_includes error_types, :invalid_keys end it "validates additionalProperties boolean unsuccessfully with multiple failures" do pointer("#/definitions/app").merge!( "additionalProperties" => false, "patternProperties" => { "^matches" => {} } ) data_sample["foo"] = "bar" data_sample["baz"] = "blah" data_sample["matches_pattern"] = "yes!" refute_valid assert_includes error_messages, %{"baz", "foo" are not permitted keys.} assert_includes error_types, :invalid_keys end it "validates additionalProperties schema successfully" do pointer("#/definitions/app").merge!( "additionalProperties" => { "type" => ["boolean"] } ) data_sample["foo"] = true assert_valid end it "validates additionalProperties schema unsuccessfully" do pointer("#/definitions/app").merge!( "additionalProperties" => { "type" => ["boolean"] }, "patternProperties" => { "^matches" => {} } ) data_sample["foo"] = 4 data_sample["matches_pattern"] = "yes!" 
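    # "matches_pattern" is covered by patternProperties, so only "foo" falls
    # through to the additionalProperties schema, and 4 is not a boolean.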
refute_valid assert_includes error_messages, %{For 'additionalProperties', 4 is not a boolean.} assert_includes error_types, :invalid_type end it "validates simple dependencies" do pointer("#/definitions/app/dependencies").merge!( "production" => "ssl" ) data_sample["production"] = true refute_valid assert_includes error_messages, %{"ssl" wasn't supplied.} end it "validates schema dependencies" do pointer("#/definitions/app/dependencies").merge!( "ssl" => { "properties" => { "cost" => { "minimum" => 20.0, } } } ) data_sample["cost"] = 10.0 data_sample["ssl"] = true refute_valid assert_includes error_messages, %{10.0 must be greater than or equal to 20.0.} assert_includes error_types, :min_failed end it "validates maxProperties" do pointer("#/definitions/app").merge!( "maxProperties" => 0 ) data_sample["name"] = "cloudnasium" refute_valid assert_includes error_messages, %{No more than 0 properties are allowed; 1 was supplied.} assert_includes error_types, :max_properties_failed assert_includes error_data, { "name" => "cloudnasium" } end it "validates minProperties" do pointer("#/definitions/app").merge!( "minProperties" => 2 ) data_sample["name"] = "cloudnasium" refute_valid assert_includes error_messages, %{At least 2 properties are required; 1 was supplied.} assert_includes error_types, :min_properties_failed assert_includes error_data, { "name" => "cloudnasium" } end it "validates patternProperties" do pointer("#/definitions/app/definitions/config_vars").merge!( "patternProperties" => { "^\\w+$" => { "type" => ["null", "string"] } } ) data_sample["config_vars"] = { "" => 123, "KEY" => 456 } refute_valid assert_includes error_messages, %{For 'definitions/config_vars', 456 is not a null or string.} assert_includes error_types, :invalid_type end it "validates patternProperties with missing parent" do data_sample["S_0"] = 123 refute validate_parentless_pattern assert_includes error_messages, %{For 'patternProperties/^S_', 123 is not a string.} assert_includes error_types, :invalid_type end it "validates required" do pointer("#/definitions/app/dependencies").merge!( "required" => ["name"] ) data_sample.delete("name") refute_valid assert_includes error_messages, %{"name" wasn't supplied.} assert_includes error_types, :required_failed assert_includes error_data, ["name"] end it "validates strictProperties successfully" do pointer("#/definitions/app").merge!( "strictProperties" => false ) assert_valid end it "validates strictProperties unsuccessfully" do pointer("#/definitions/app").merge!( "patternProperties" => { "^matches" => {} }, "strictProperties" => true ) data_sample["extra_key"] = "value" data_sample["matches_pattern"] = "yes!" 
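    # strictProperties (a draft-V5-style extension; see the note in schema.rb)
    # both requires every declared property and forbids undeclared keys, so the
    # missing declared properties and the extra key are each reported below.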
refute_valid missing = @schema.properties.keys.sort - ["name"] assert_includes error_messages, %{"#{missing.join('", "')}" weren't supplied.} assert_includes error_messages, %{"extra_key" is not a permitted key.} assert_includes error_types, :invalid_keys end it "validates allOf" do pointer("#/definitions/app/definitions/contrived").merge!( "allOf" => [ { "maxLength" => 30 }, { "minLength" => 3 } ] ) data_sample["contrived"] = "ab" refute_valid assert_includes error_messages, %{Not all subschemas of "allOf" matched.} assert_includes error_types, :all_of_failed end it "includes the failing condition when validating allOf" do pointer("#/definitions/app/definitions/contrived").merge!( "allOf" => [ { "maxLength" => 30 }, { "minLength" => 3 } ] ) data_sample["contrived"] = "ab" refute_valid assert_includes error_messages, %{At least 3 characters are required; only 2 were supplied.} assert_includes error_data, "ab" end it "includes all failing conditions for allOf as sub-errors when all_of_sub_errors is true" do JsonSchema.configure do |c| c.all_of_sub_errors = true end pointer("#/definitions/app/definitions/contrived").merge!( "allOf" => [ { "minLength" => 5 }, { "minLength" => 3 } ] ) data_sample["contrived"] = "ab" refute_valid assert_includes error_messages, %{Not all subschemas of "allOf" matched.} assert_includes error_types, :all_of_failed all_of_error = @validator.errors.find { |error| error.type == :all_of_failed } sub_error_messages = all_of_error.sub_errors.map { |errors| errors.map(&:message) } sub_error_types = all_of_error.sub_errors.map { |errors| errors.map(&:type) } assert_includes sub_error_messages, [%{At least 3 characters are required; only 2 were supplied.}] assert_includes sub_error_messages, [%{At least 5 characters are required; only 2 were supplied.}] assert_equal sub_error_types, [[:min_length_failed], [:min_length_failed]] assert_includes error_data, "ab" end it "validates anyOf" do pointer("#/definitions/app/definitions/contrived").merge!( "anyOf" => [ { "minLength" => 5 }, { "minLength" => 3 } ] ) data_sample["contrived"] = "ab" refute_valid assert_includes error_messages, %{No subschema in "anyOf" matched.} assert_includes error_types, :any_of_failed any_of_error = @validator.errors.find { |error| error.type == :any_of_failed } sub_error_messages = any_of_error.sub_errors.map { |errors| errors.map(&:message) } sub_error_types = any_of_error.sub_errors.map { |errors| errors.map(&:type) } assert_includes sub_error_messages, [%{At least 5 characters are required; only 2 were supplied.}] assert_includes sub_error_messages, [%{At least 3 characters are required; only 2 were supplied.}] assert_equal sub_error_types, [[:min_length_failed], [:min_length_failed]] assert_includes error_data, "ab" end it "validates oneOf" do pointer("#/definitions/app/definitions/contrived").merge!( "oneOf" => [ { "pattern" => "^(foo|aaa)$" }, { "pattern" => "^(foo|zzz)$" }, { "pattern" => "^(hell|no)$" } ] ) data_sample["contrived"] = "foo" refute_valid assert_includes error_messages, %{More than one subschema in "oneOf" matched.} assert_includes error_types, :one_of_failed one_of_error = @validator.errors.find { |error| error.type == :one_of_failed } sub_error_messages = one_of_error.sub_errors.map { |errors| errors.map(&:message) } sub_error_types = one_of_error.sub_errors.map { |errors| errors.map(&:type) } assert_equal sub_error_messages, [[], [], [%{foo does not match /^(hell|no)$/.}]] assert_equal sub_error_types, [[], [], [:pattern_failed]] assert_includes error_data, "foo" end it 
"validates not" do pointer("#/definitions/app/definitions/contrived").merge!( "not" => { "pattern" => "^$" } ) data_sample["contrived"] = "" refute_valid assert_includes error_messages, %{Matched "not" subschema.} assert_includes error_types, :not_failed assert_includes error_data, "" end it "validates date format successfully" do pointer("#/definitions/app/definitions/owner").merge!( "format" => "date" ) data_sample["owner"] = "2014-05-13" assert_valid end it "validates date format unsuccessfully" do pointer("#/definitions/app/definitions/owner").merge!( "format" => "date" ) data_sample["owner"] = "13/05/2014" refute_valid end it "validates date-time format successfully" do pointer("#/definitions/app/definitions/owner").merge!( "format" => "date-time" ) data_sample["owner"] = "2014-05-13T08:42:40Z" assert_valid end it "validates date-time format with time zone successfully" do pointer("#/definitions/app/definitions/owner").merge!( "format" => "date-time" ) data_sample["owner"] = "2014-05-13T08:42:40-00:00" assert_valid end it "validates date-time format with time fraction successfully" do pointer("#/definitions/app/definitions/owner").merge!( "format" => "date-time" ) data_sample["owner"] = "2014-05-13T08:42:40.444Z" assert_valid end it "validates date-time format unsuccessfully" do pointer("#/definitions/app/definitions/owner").merge!( "format" => "date-time" ) data_sample["owner"] = "2014-05-13T08:42:40" refute_valid assert_includes error_messages, %{2014-05-13T08:42:40 is not a valid date-time.} assert_includes error_types, :invalid_format assert_includes error_data, "2014-05-13T08:42:40" end it "validates email format successfully" do pointer("#/definitions/app/definitions/owner").merge!( "format" => "email" ) data_sample["owner"] = "dwarf@example.com" assert_valid end it "validates email format with long TLDs successfully" do pointer("#/definitions/app/definitions/owner").merge!( "format" => "email" ) data_sample["owner"] = "dwarf@example.technology" assert_valid end it "validates email format unsuccessfully" do pointer("#/definitions/app/definitions/owner").merge!( "format" => "email" ) data_sample["owner"] = "@example.com" refute_valid assert_includes error_messages, %{@example.com is not a valid email.} assert_includes error_types, :invalid_format end it "validates hostname format successfully" do pointer("#/definitions/app/definitions/owner").merge!( "format" => "hostname" ) data_sample["owner"] = "example.com" assert_valid end it "validates hostname format unsuccessfully" do pointer("#/definitions/app/definitions/owner").merge!( "format" => "hostname" ) data_sample["owner"] = "@example.com" refute_valid assert_includes error_messages, %{@example.com is not a valid hostname.} assert_includes error_types, :invalid_format end it "validates ipv4 format successfully" do pointer("#/definitions/app/definitions/owner").merge!( "format" => "ipv4" ) data_sample["owner"] = "1.2.3.4" assert_valid end it "validates ipv4 format unsuccessfully" do pointer("#/definitions/app/definitions/owner").merge!( "format" => "ipv4" ) data_sample["owner"] = "1.2.3.4.5" refute_valid assert_includes error_messages, %{1.2.3.4.5 is not a valid ipv4.} assert_includes error_types, :invalid_format end it "validates ipv6 format successfully" do pointer("#/definitions/app/definitions/owner").merge!( "format" => "ipv6" ) data_sample["owner"] = "1::3:4:5:6:7:8" assert_valid end it "validates ipv6 format unsuccessfully" do pointer("#/definitions/app/definitions/owner").merge!( "format" => "ipv6" ) data_sample["owner"] 
= "1::3:4:5:6:7:8:9" refute_valid assert_includes error_messages, %{1::3:4:5:6:7:8:9 is not a valid ipv6.} assert_includes error_types, :invalid_format end it "validates regex format successfully" do pointer("#/definitions/app/definitions/owner").merge!( "format" => "regex" ) data_sample["owner"] = "^owner@heroku\.com$" assert_valid end it "validates regex format successfully" do pointer("#/definitions/app/definitions/owner").merge!( "format" => "regex" ) data_sample["owner"] = "^owner($" refute_valid assert_includes error_messages, %{^owner($ is not a valid regex.} assert_includes error_types, :invalid_format end it "validates absolute uri format successfully" do pointer("#/definitions/app/definitions/owner").merge!( "format" => "uri" ) data_sample["owner"] = "https://example.com" assert_valid end it "validates relative uri format successfully" do pointer("#/definitions/app/definitions/owner").merge!( "format" => "uri" ) data_sample["owner"] = "schemata/app" assert_valid end it "validates uri format unsuccessfully" do pointer("#/definitions/app/definitions/owner").merge!( "format" => "uri" ) data_sample["owner"] = "http://example.com[]" refute_valid assert_includes error_messages, %{http://example.com[] is not a valid uri.} assert_includes error_types, :invalid_format end it "validates absolute uri-reference format successfully" do pointer("#/definitions/app/definitions/owner").merge!( "format" => "uri-reference" ) data_sample["owner"] = "https://example.com" assert_valid end it "validates relative uri format successfully" do pointer("#/definitions/app/definitions/owner").merge!( "format" => "uri" ) data_sample["owner"] = "#hello" assert_valid end it "validates uri format unsuccessfully" do pointer("#/definitions/app/definitions/owner").merge!( "format" => "uri-reference" ) data_sample["owner"] = "http://example.com[]" refute_valid assert_includes error_messages, %{http://example.com[] is not a valid uri-reference.} assert_includes error_types, :invalid_format end it "validates uuid format successfully" do pointer("#/definitions/app/definitions/owner").merge!( "format" => "uuid" ) data_sample["owner"] = "01234567-89ab-cdef-0123-456789abcdef" assert_valid end it "validates uuid format unsuccessfully" do pointer("#/definitions/app/definitions/owner").merge!( "format" => "uuid" ) data_sample["owner"] = "123" refute_valid assert_includes error_messages, %{123 is not a valid uuid.} assert_includes error_types, :invalid_format end it "validates maxLength" do pointer("#/definitions/app/definitions/name").merge!( "maxLength" => 3 ) data_sample["name"] = "abcd" refute_valid assert_includes error_messages, %{Only 3 characters are allowed; 4 were supplied.} assert_includes error_types, :max_length_failed end it "validates minLength" do pointer("#/definitions/app/definitions/name").merge!( "minLength" => 3 ) data_sample["name"] = "ab" refute_valid assert_includes error_messages, %{At least 3 characters are required; only 2 were supplied.} assert_includes error_types, :min_length_failed end it "validates pattern" do pointer("#/definitions/app/definitions/name").merge!( "pattern" => "^[a-z][a-z0-9-]{3,30}$", ) data_sample["name"] = "ab" refute_valid assert_includes error_messages, %{ab does not match /^[a-z][a-z0-9-]{3,30}$/.} assert_includes error_types, :pattern_failed assert_includes error_data, "ab" end it "builds appropriate JSON Pointers to bad data" do pointer("#/definitions/app/definitions/visibility").merge!( "enum" => ["private", "public"] ) data_sample["visibility"] = "personal" refute_valid 
assert_equal "#/visibility", @validator.errors[0].pointer end =begin it "handles a validation loop" do pointer("#/definitions/app").merge!( "not" => { "$ref" => "#/definitions/app" } ) data_sample["visibility"] = "personal" refute_valid assert_includes error_messages, %{Validation loop detected.} end =end it "validates custom formats successfully" do JsonSchema.configure do |c| c.register_format "the-answer", ->(data) { data.to_i == 42 } end pointer("#/definitions/app/definitions/owner").merge!( "format" => "the-answer" ) data_sample["owner"] = "42" assert_valid end it "validates custom formats unsuccessfully" do JsonSchema.configure do |c| c.register_format "the-answer", ->(data) { data.to_i == 42 } end pointer("#/definitions/app/definitions/owner").merge!( "format" => "the-answer" ) data_sample["owner"] = "43" refute_valid assert_includes error_messages, %{43 is not a valid the-answer.} assert_includes error_types, :invalid_format end it "raises an aggregate error with validate!" do pointer("#/definitions/app").merge!( "type" => ["object"] ) schema = JsonSchema.parse!(schema_sample) schema.expand_references! schema = schema.definitions["app"] validator = JsonSchema::Validator.new(schema) # don't bother checking the particulars of the error here because we have # other tests for that above assert_raises JsonSchema::AggregateError do validator.validate!(4) end end def data_sample @data_sample ||= DataScaffold.data_sample end def error_messages @validator.errors.map(&:message) end def error_data @validator.errors.map(&:data) end def error_types @validator.errors.map(&:type) end def pointer(path) JsonPointer.evaluate(schema_sample, path) end def validate_parentless_pattern schema = { "$schema" => "http://json-schema.org/draft-04/hyper-schema", "patternProperties" => { "^S_" => { "type" => [ "string" ] } } } schema = JsonSchema.parse!(schema) @validator = JsonSchema::Validator.new(schema) @validator.validate(data_sample) end def schema_sample @schema_sample ||= DataScaffold.schema_sample end def validator @schema = JsonSchema.parse!(schema_sample) @schema.expand_references! @schema = @schema.definitions["app"] JsonSchema::Validator.new(@schema) end # assert_valid asserts that both the "fail fast" and the "full error messages" # code paths consider the data sample valid for the set schema. def assert_valid @validator = validator assert @validator.validate(data_sample, fail_fast: true) assert @validator.validate(data_sample, fail_fast: false) end # refute_valid asserts that both the "fail fast" and the "full error messages" # code paths consider the data sample erroneous for the set schema. def refute_valid @validator = validator refute @validator.validate(data_sample, fail_fast: true) refute @validator.validate(data_sample, fail_fast: false) end end json-schema-0.19.1/test/json_schema_test.rb0000644000175000017500000000203213374107723020227 0ustar boutilboutilrequire "test_helper" require "json_schema" describe JsonSchema do describe ".parse" do it "succeeds" do schema, errors = JsonSchema.parse(schema_sample) assert schema assert_nil errors end it "returns errors on a parsing problem" do pointer("#/properties").merge!( "app" => 4 ) schema, errors = JsonSchema.parse(schema_sample) refute schema assert_includes errors.map { |e| e.type }, :schema_not_found end end describe ".parse!" do it "succeeds on .parse!" 
      assert JsonSchema.parse!(schema_sample)
    end

    it "returns errors on a parsing problem" do
      pointer("#/properties").merge!(
        "app" => 4
      )
      e = assert_raises(JsonSchema::AggregateError) do
        JsonSchema.parse!(schema_sample)
      end
      assert_includes e.message, %{4 is not a valid schema.}
    end
  end

  def pointer(path)
    JsonPointer.evaluate(schema_sample, path)
  end

  def schema_sample
    @schema_sample ||= DataScaffold.schema_sample
  end
end
json-schema-0.19.1/test/commands/0000755000175000017500000000000013374107723016156 5ustar boutilboutil
json-schema-0.19.1/test/commands/validate_schema_test.rb0000644000175000017500000000607413374107723022660 0ustar boutilboutil
require "test_helper"
require "commands/validate_schema"
require "tempfile"

describe Commands::ValidateSchema do
  before do
    @command = Commands::ValidateSchema.new
  end

  it "shows usage with no arguments" do
    success = @command.run([])
    assert_equal [], @command.errors
    assert_equal [], @command.messages
    refute success
  end

  it "runs successfully in fail fast mode" do
    temp_file(basic_schema) do |path|
      @command.fail_fast = true
      success = @command.run([schema_path, path])
      assert_equal [], @command.errors
      assert_equal ["#{path} is valid."], @command.messages
      assert success
    end
  end

  it "runs successfully in detect mode" do
    temp_file(basic_schema) do |path|
      @command.extra_schemas << schema_path
      @command.detect = true
      success = @command.run([path])
      assert_equal [], @command.errors
      assert_equal ["#{path} is valid."], @command.messages
      assert success
    end
  end

  it "runs successfully out of detect mode" do
    temp_file(basic_schema) do |path|
      @command.detect = false
      success = @command.run([schema_path, path])
      assert_equal [], @command.errors
      assert_equal ["#{path} is valid."], @command.messages
      assert success
    end
  end

  it "takes extra schemas" do
    temp_file(basic_hyper_schema) do |path|
      @command.detect = false
      @command.extra_schemas << schema_path
      success = @command.run([hyper_schema_path, path])
      assert_equal [], @command.errors
      assert_equal ["#{path} is valid."], @command.messages
      assert success
    end
  end

  it "requires at least one argument in detect mode" do
    @command.detect = true
    success = @command.run([])
    assert_equal [], @command.errors
    assert_equal [], @command.messages
    refute success
  end

  it "requires at least two arguments out of detect mode" do
    @command.detect = false
    success = @command.run([hyper_schema_path])
    assert_equal [], @command.errors
    assert_equal [], @command.messages
    refute success
  end

  it "errors on invalid files" do
    @command.detect = false
    success = @command.run(["dne-1", "dne-2"])
    assert_equal ["dne-1: No such file or directory."], @command.errors
    assert_equal [], @command.messages
    refute success
  end

  it "errors on empty files" do
    temp_file("") do |path|
      success = @command.run([hyper_schema_path, path])
      assert_equal ["#{path}: File is empty."], @command.errors
      refute success
    end
  end

  def basic_hyper_schema
    <<-eos
      { "$schema": "http://json-schema.org/draft-04/hyper-schema" }
    eos
  end

  def basic_schema
    <<-eos
      { "$schema": "http://json-schema.org/draft-04/schema" }
    eos
  end

  def hyper_schema_path
    File.expand_path("hyper-schema.json", "#{__FILE__}/../../../schemas")
  end

  def schema_path
    File.expand_path("schema.json", "#{__FILE__}/../../../schemas")
  end

  def temp_file(contents)
    file = Tempfile.new("schema")
    file.write(contents)
    file.size() # flush
    yield(file.path)
  ensure
    file.close
    file.unlink
  end
end
json-schema-0.19.1/test/data_scaffold.rb0000644000175000017500000001665113374107723017465 0ustar boutilboutil
module DataScaffold
  def self.data_sample
    {
      "name" => "cloudnasium"
    }
  end
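
  # The samples in this module feed the unit tests: a minimal sketch of how
  # they are typically consumed (kept as a comment so that requiring this
  # helper stays side-effect free; the flow mirrors the `validator` helper in
  # test/json_schema/validator_test.rb):
  #
  #   schema = JsonSchema.parse!(DataScaffold.schema_sample)
  #   schema.expand_references!
  #   app = schema.definitions["app"]
  #   validator = JsonSchema::Validator.new(app)
  #   validator.validate(DataScaffold.data_sample) # => true; details on validator.errors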

  def self.schema_sample
    {
      "$schema" => "http://json-schema.org/draft-04/hyper-schema",
      "title" => "Example API",
      "description" => "An example API.",
      "type" => [ "object" ],
      "definitions" => {
        "app" => {
          "$schema" => "http://json-schema.org/draft-04/hyper-schema",
          "title" => "App",
          "description" => "An app.",
          "id" => "schemata/app",
          "type" => [ "object" ],
          "definitions" => {
            "config_vars" => {
              "patternProperties" => {
                "^\\w+$" => {
                  "type" => ["null", "string"]
                }
              }
            },
            "contrived" => {
              "allOf" => [
                { "maxLength" => 30 },
                { "minLength" => 3 }
              ],
              "anyOf" => [
                { "minLength" => 3 },
                { "minLength" => 5 }
              ],
              "oneOf" => [
                { "pattern" => "^(foo|aaa)$" },
                { "pattern" => "^(foo|zzz)$" }
              ],
              "not" => { "pattern" => "^$" }
            },
            "contrived_plus" => {
              "allOf" => [
                { "$ref" => "/schemata/app#/definitions/contrived/allOf/0" },
                { "$ref" => "/schemata/app#/definitions/contrived/allOf/1" }
              ],
              "anyOf" => [
                { "$ref" => "/schemata/app#/definitions/contrived/anyOf/0" },
                { "$ref" => "/schemata/app#/definitions/contrived/anyOf/1" }
              ],
              "oneOf" => [
                { "$ref" => "/schemata/app#/definitions/contrived/oneOf/0" },
                { "$ref" => "/schemata/app#/definitions/contrived/oneOf/1" }
              ],
              "not" => { "$ref" => "/schemata/app#/definitions/contrived/not" }
            },
            "cost" => {
              "description" => "running price of an app",
              "example" => 35.01,
              "maximum" => 1000.00,
              "exclusiveMaximum" => true,
              "minimum" => 0.0,
              "exclusiveMinimum" => false,
              "multipleOf" => 0.01,
              "readOnly" => false,
              "type" => ["number"],
            },
            "flags" => {
              "description" => "flags for an app",
              "example" => ["websockets"],
              "items" => {
                "pattern" => "^[a-z][a-z\\-]*[a-z]$"
              },
              "maxItems" => 10,
              "minItems" => 1,
              "readOnly" => false,
              "type" => ["array"],
              "uniqueItems" => true
            },
            "id" => {
              "description" => "integer identifier of an app",
              "example" => 1,
              "maximum" => 10000,
              "exclusiveMaximum" => false,
              "minimum" => 0,
              "exclusiveMinimum" => true,
              "multipleOf" => 1,
              "readOnly" => true,
              "type" => ["integer"],
            },
            "identity" => {
              "anyOf" => [
                { "$ref" => "/schemata/app#/definitions/id" },
                { "$ref" => "/schemata/app#/definitions/name" },
              ]
            },
            "name" => {
              "default" => "hello-world",
              "description" => "unique name of app",
              "example" => "name",
              "maxLength" => 30,
              "minLength" => 3,
              "pattern" => "^[a-z][a-z0-9-]{3,30}$",
              "readOnly" => false,
              "type" => ["string"]
            },
            "owner" => {
              "description" => "owner of the app",
              "format" => "email",
              "example" => "dwarf@example.com",
              "readOnly" => false,
              "type" => ["string"]
            },
            "production" => {
              "description" => "whether this is a production app",
              "example" => false,
              "readOnly" => false,
              "type" => ["boolean"]
            },
            "role" => {
              "description" => "name of a role on an app",
              "example" => "collaborator",
              "readOnly" => true,
              "type" => ["string"],
            },
            "roles" => {
              "additionalProperties" => true,
              "patternProperties" => {
                "^\\w+$" => {
                  "$ref" => "/schemata/app#/definitions/role"
                }
              }
            },
            "ssl" => {
              "description" => "whether this app has SSL termination",
              "example" => false,
              "readOnly" => false,
              "type" => ["boolean"]
            },
            "visibility" => {
              "description" => "the visibility of the app",
              "enum" => ["private", "public"],
              "example" => false,
              "readOnly" => false,
              "type" => ["string"]
            },
          },
          "properties" => {
            "config_vars" => { "$ref" => "/schemata/app#/definitions/config_vars" },
            "contrived" => { "$ref" => "/schemata/app#/definitions/contrived" },
            "cost" => { "$ref" => "/schemata/app#/definitions/cost" },
            "flags" => { "$ref" => "/schemata/app#/definitions/flags" },
            "id" => { "$ref" => "/schemata/app#/definitions/id" },
            "name" => { "$ref" => "/schemata/app#/definitions/name" },
            "owner" => { "$ref" =>
"/schemata/app#/definitions/owner" }, "production" => { "$ref" => "/schemata/app#/definitions/production" }, "ssl" => { "$ref" => "/schemata/app#/definitions/ssl" }, "visibility" => { "$ref" => "/schemata/app#/definitions/visibility" } }, "additionalProperties" => false, "dependencies" => { "production" => "ssl", "ssl" => { "properties" => { "cost" => { "minimum" => 20.0, }, "name" => { "$ref" => "/schemata/app#/definitions/name" }, } } }, "maxProperties" => 10, "minProperties" => 1, "required" => ["name"], "links" => [ "description" => "Create a new app.", "href" => "/apps", "method" => "POST", "rel" => "create", "schema" => { "properties" => { "name" => { "$ref" => "#/definitions/app/definitions/name" }, } }, "targetSchema" => { "$ref" => "#/definitions/app" } ], "media" => { "type" => "application/json" }, "pathStart" => "/", "readOnly" => false } }, "properties" => { "app" => { "$ref" => "#/definitions/app" }, }, "links" => [ { "href" => "http://example.com", "rel" => "self" } ] } end end json-schema-0.19.1/test/test_helper.rb0000644000175000017500000000074013374107723017221 0ustar boutilboutilif RUBY_VERSION >= '2.0.0' require 'simplecov' SimpleCov.start do # We do our utmost to test our executables by modularizing them into # testable pieces, but testing them to completion is nearly impossible as # far as I can tell, so include them in our tests but don't calculate # coverage. add_filter "/bin/" add_filter "/test/" end end require "minitest" require "minitest/autorun" #require "pry-rescue/minitest" require_relative "data_scaffold" json-schema-0.19.1/test/json_reference/0000755000175000017500000000000013374107723017344 5ustar boutilboutiljson-schema-0.19.1/test/json_reference/reference_test.rb0000644000175000017500000000223013374107723022663 0ustar boutilboutilrequire "test_helper" require "json_reference" describe JsonReference::Reference do it "expands a reference without a URI" do ref = reference("#/definitions") assert_nil ref.uri assert_equal "#/definitions", ref.pointer end it "expands a reference with a URI" do ref = reference("http://example.com#/definitions") assert_equal URI.parse("http://example.com"), ref.uri assert_equal "#/definitions", ref.pointer end it "expands just a root sign" do ref = reference("#") assert_nil ref.uri assert_equal "#", ref.pointer end it "expands a URI with just a root sign" do ref = reference("http://example.com#") assert_equal URI.parse("http://example.com"), ref.uri assert_equal "#", ref.pointer end it "normalizes pointers by adding a root sign prefix" do ref = reference("/definitions") assert_nil ref.uri assert_equal "#/definitions", ref.pointer end it "normalizes pointers by stripping a trailing slash" do ref = reference("#/definitions/") assert_nil ref.uri assert_equal "#/definitions", ref.pointer end def reference(str) JsonReference::Reference.new(str) end end json-schema-0.19.1/bin/0000755000175000017500000000000013374107723014146 5ustar boutilboutiljson-schema-0.19.1/bin/validate-schema0000755000175000017500000000211313374107723017120 0ustar boutilboutil#!/usr/bin/env ruby require "optparse" require_relative "../lib/commands/validate_schema" def print_usage! $stderr.puts "Usage: validate-schema , ..." $stderr.puts " validate-schema -d , ..." end command = Commands::ValidateSchema.new parser = OptionParser.new { |opts| opts.on("-d", "--detect", "Detect schema from $schema") do command.detect = true # mix in common schemas for convenience command.extra_schemas += ["schema.json", "hyper-schema.json"]. 
map { |f| File.expand_path(f, __FILE__ + "/../../schemas") } end opts.on("-s", "--schema SCHEMA", "Additional schema to use for references") do |s| command.extra_schemas << s end opts.on("-f", "--fail-fast", "Abort after encountering the first validation error") do |s| command.fail_fast = true end } if $0 == __FILE__ parser.parse! success = command.run(ARGV.dup) if success command.messages.each { |m| $stdout.puts(m) } elsif !command.errors.empty? command.errors.each { |e| $stderr.puts(e) } exit(1) else print_usage! exit(1) end end json-schema-0.19.1/LICENSE0000644000175000017500000000211013374107723014375 0ustar boutilboutilThe MIT License (MIT) Copyright (c) 2014-2015 Brandur and contributors Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
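
Taken together, the tests and the validate-schema executable above exercise the gem's core flow: parse a schema document, expand its $ref references, and validate data against the result. The following standalone sketch shows that flow using the public API seen throughout the test suite; it is not a file shipped in the gem, and the sample schema and data are illustrative only.

# Minimal usage sketch for the json_schema gem (illustrative, not part of the
# gem). Assumes the gem is installed, e.g. via `gem install json_schema`.
require "json_schema"

schema_data = {
  "$schema"    => "http://json-schema.org/draft-04/schema#",
  "type"       => ["object"],
  "properties" => {
    "name" => { "type" => ["string"], "minLength" => 3 }
  },
  "required" => ["name"]
}

# Parse the schema (parse! raises JsonSchema::AggregateError on a malformed
# schema) and expand any $ref references it contains.
schema = JsonSchema.parse!(schema_data)
schema.expand_references!

# validate returns a boolean and records details on #errors;
# validate! raises an AggregateError instead.
validator = JsonSchema::Validator.new(schema)
data = { "name" => "cloudnasium" }

if validator.validate(data)
  puts "valid"
else
  validator.errors.each { |e| puts "#{e.pointer}: #{e.message}" }
end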