model_tokenizer-1.0.2/Gemfile.lock

PATH
  remote: .
  specs:
    model_tokenizer (1.0.2)
      activerecord (~> 4.0)

GEM
  remote: https://rubygems.org/
  specs:
    activemodel (4.2.11.3)
      activesupport (= 4.2.11.3)
      builder (~> 3.1)
    activerecord (4.2.11.3)
      activemodel (= 4.2.11.3)
      activesupport (= 4.2.11.3)
      arel (~> 6.0)
    activesupport (4.2.11.3)
      i18n (~> 0.7)
      minitest (~> 5.1)
      thread_safe (~> 0.3, >= 0.3.4)
      tzinfo (~> 1.1)
    arel (6.0.4)
    builder (3.2.4)
    concurrent-ruby (1.1.6)
    coveralls (0.8.23)
      json (>= 1.8, < 3)
      simplecov (~> 0.16.1)
      term-ansicolor (~> 1.3)
      thor (>= 0.19.4, < 2.0)
      tins (~> 1.6)
    docile (1.3.2)
    i18n (0.9.5)
      concurrent-ruby (~> 1.0)
    json (2.3.0)
    minitest (5.14.1)
    rake (13.0.1)
    simplecov (0.16.1)
      docile (~> 1.1)
      json (>= 1.8, < 3)
      simplecov-html (~> 0.10.0)
    simplecov-html (0.10.2)
    sqlite3 (1.3.13)
    sync (0.5.0)
    term-ansicolor (1.7.1)
      tins (~> 1.0)
    thor (1.0.1)
    thread_safe (0.3.6)
    tins (1.25.0)
      sync
    tzinfo (1.2.7)
      thread_safe (~> 0.1)

PLATFORMS
  ruby

DEPENDENCIES
  bundler (~> 1.7)
  coveralls
  minitest
  model_tokenizer!
  rake (>= 12.3.3)
  sqlite3 (< 1.4)

BUNDLED WITH
   1.17.2

model_tokenizer-1.0.2/.travis.yml

language: ruby
cache: bundler
sudo: false
rvm:
  - 2.0.0
script: 'bundle exec rake'
notifications:
  email:
    recipients:
      - adib.saad@gmail.com
    on_failure: change
    on_success: never

model_tokenizer-1.0.2/test/helper.rb

require "bundler/setup"

require 'coveralls'
Coveralls.wear!

require "active_record"

# Put all your 'at_exit's that you want executed after
# running your tests BEFORE requiring minitest,
# as minitest uses at_exit to run the tests.
# Putting the following line after requiring minitest will
# close the database connection before the first test runs.
at_exit { ActiveRecord::Base.connection.disconnect! }

require "minitest/autorun"

Minitest::Test = MiniTest::Unit::TestCase unless defined?(Minitest::Test)

#require "mocha/setup"

# if ENV["COVERAGE"]
#   require 'simplecov'
#   SimpleCov.start do
#     add_filter "test/"
#     add_filter "friendly_id/migration"
#   end
# end

# I18n.enforce_available_locales = false

require 'model_tokenizer'

# If you want to see the ActiveRecord log, invoke the tests using `rake test LOG=true`
# if ENV["LOG"]
#   require "logger"
#   ActiveRecord::Base.logger = Logger.new($stdout)
# end

module ModelTokenizer
  module Test
    def self.included(base)
      MiniTest::Unit.autorun
    end

    # Runs the block inside a transaction that is always rolled back,
    # so each test leaves the database untouched.
    def transaction
      ActiveRecord::Base.transaction { yield; raise ActiveRecord::Rollback }
    end

    # Yields a freshly created (and later rolled-back) instance of the given model class.
    def with_instance_of(*args)
      model_class = args.shift
      args[0] ||= {:data => "some data goes here"}
      transaction { yield model_class.create!(*args) }
    end

    module Database
      extend self

      def connect
        version = ActiveRecord::VERSION::STRING
        driver  = ModelTokenizer::Test::Database.driver
        engine  = RUBY_ENGINE rescue "ruby"

        ActiveRecord::Base.establish_connection config[driver]
        message = "Using #{engine} #{RUBY_VERSION} AR #{version} with #{driver}"

        puts "-" * 72

        if in_memory?
          ActiveRecord::Migration.verbose = false
          Schema.migrate :up
          puts "#{message} (in-memory)"
        else
          puts message
        end
      end

      def config
        @config ||= YAML::load(File.open(File.expand_path("../databases.yml", __FILE__)))
      end

      def driver
        (ENV["DB"] or "sqlite3").downcase
      end

      def in_memory?
        config[driver]["database"] == ":memory:"
      end
    end
  end
end

class Module
  def test(name, &block)
    define_method("test_#{name.gsub(/[^a-z0-9']/i, "_")}".to_sym, &block)
  end
end

require "schema"
ModelTokenizer::Test::Database.connect
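
The helpers above can be combined with the small `test` DSL that helper.rb adds to Module. As an illustration only (this test is not part of the gem, and it assumes the `Car` model defined in test/token_generator_test.rb below):

```ruby
require "helper"

class TokenFormat < MiniTest::Test
  include ModelTokenizer::Test

  # Defined via the Module#test helper from test/helper.rb.
  test "tokens never contain ambiguous characters" do
    with_instance_of(Car) do |record|
      # l, I, O, Q, U, V, 1 and 0 are excluded from the charset.
      refute_match(/[lIOQUV10]/, record.token)
    end
  end
end
```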
config[driver]["database"] == ":memory:" end end end end class Module def test(name, &block) define_method("test_#{name.gsub(/[^a-z0-9']/i, "_")}".to_sym, &block) end end require "schema" ModelTokenizer::Test::Database.connect model_tokenizer-1.0.2/test/databases.yml0000644000004100000410000000010413704151140020354 0ustar www-datawww-datasqlite3: adapter: sqlite3 database: ":memory:" encoding: utf8 model_tokenizer-1.0.2/test/token_generator_test.rb0000644000004100000410000000223013704151140022456 0ustar www-datawww-datarequire "helper" class Car < ActiveRecord::Base extend ModelTokenizer has_token end class Truck < ActiveRecord::Base extend ModelTokenizer has_token :length => 16 end class TokenGenerator < MiniTest::Test include ModelTokenizer::Test def setup Car.all.each(&:destroy) Truck.all.each(&:destroy) end def test_that_tokens_are_created_for_models with_instance_of(Car) do |record| assert record.token, "Token is nil" assert record.token.length == Car::model_tokenizer_token_length, "Token length is not #{Car::model_tokenizer_token_length}" record.token.split("").each do |c| assert ModelTokenizer::Base::CHARSET.include?(c), "#{c} doesn't belong in the acceptable character set" end end with_instance_of(Truck) do |record| assert record.token, "Token is nil" assert record.token.length == Truck::model_tokenizer_token_length, "Token length is not #{Truck::model_tokenizer_token_length}" record.token.split("").each do |c| assert ModelTokenizer::Base::CHARSET.include?(c), "#{c} doesn't belong in the acceptable character set" end end end end model_tokenizer-1.0.2/test/schema.rb0000644000004100000410000000105013704151140017470 0ustar www-datawww-datamodule ModelTokenizer module Test class Schema < ActiveRecord::Migration class << self def down drop_table :cars end def up # TODO: use schema version to avoid ugly hacks like this return if @done create_table :cars do |t| t.string :data t.string :token end create_table :trucks do |t| t.string :data t.string :token end @done = true end end end end end model_tokenizer-1.0.2/README.md0000644000004100000410000000430413704151140016210 0ustar www-datawww-data# ModelTokenizer [![Build Status](https://travis-ci.org/adibsaad/model_tokenizer.svg?branch=master)](https://travis-ci.org/adibsaad/model_tokenizer) [![Coverage Status](https://coveralls.io/repos/adibsaad/model_tokenizer/badge.svg)](https://coveralls.io/r/adibsaad/model_tokenizer) Generates random tokens that models can be accessed by. Instead of ``` somesite.com/video/71589 ``` you'll get ``` somesite.com/video/j5-drkENpSDBNRds ``` ## Installation Add this line to your application's Gemfile: ```ruby gem 'model_tokenizer' ``` And then execute: $ bundle Or install it yourself as: $ gem install model_tokenizer ## Usage Run $ rails g model_tokenizer MODEL_NAME [field:type field:type ... ] to create a new tokenized model. If the model already exists, ModelTokenizer will integrate into it by injecting the following code ```ruby extend ModelTokenizer has_token self.primary_key = :token ``` The appropriate migration will also be created, which will create the ```token``` field and its associated unique index. 

## Notes

ModelTokenizer generates tokens from the following charset:

```
a b c d e f g h i j k m n o p q r s t u v w x y z
A B C D E F G H J K L M N P R S T W X Y Z
2 3 4 5 6 7 8 9 - _
```

As you may have noticed, the following ambiguous characters have been removed:

* Lowercase: l
* Uppercase: I, O, Q, U, V
* Numerals: 1, 0

However, the gem doesn't check for awkward tokens: ones that are confusing, contain too many repeating characters or underscores/hyphens, or otherwise make someone raise an eyebrow (e.g. DXMHMHLALAH, _-aj-a2j6f-qacins-).

Additionally, ModelTokenizer doesn't detect whether it has run out of combinations when generating new tokens, though this will be dealt with in the future.

ModelTokenizer has been tested with Rails 3 and 4 (this release declares a runtime dependency on ActiveRecord ~> 4.0).

## Contributing

1. Fork it ( https://github.com/adibsaad/model_tokenizer/fork )
2. Create your feature branch (`git checkout -b my-new-feature`)
3. Commit your changes (`git commit -am 'Add some feature'`)
4. Push to the branch (`git push origin my-new-feature`)
5. Create a new Pull Request

model_tokenizer-1.0.2/.gitignore

/.bundle/
/.yardoc
/_yardoc/
/coverage/
/doc/
/pkg/
/spec/reports/
/tmp/
*.bundle
*.so
*.o
*.a
*.gem
mkmf.log

model_tokenizer-1.0.2/model_tokenizer.gemspec

# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'model_tokenizer/version'

Gem::Specification.new do |spec|
  spec.name          = "model_tokenizer"
  spec.version       = ModelTokenizer::VERSION
  spec.authors       = ["Adib Saad"]
  spec.email         = ["adib.saad@gmail.com"]
  spec.summary       = %q{Random token generator for Rails models}
  spec.homepage      = "https://github.com/adibsaad/model_tokenizer"
  spec.license       = "MIT"
  spec.description   = %q{ModelTokenizer creates random tokens to be used as primary keys for ActiveRecord objects}

  spec.files         = `git ls-files`.split("\n")
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_dependency("activerecord", "~> 4.0")
end

model_tokenizer-1.0.2/Rakefile

require "bundler/gem_tasks"
require "rake/testtask"

desc 'Test model_tokenizer.'
Rake::TestTask.new do |t|
  t.libs << 'test'
  t.test_files = FileList['test/*_test.rb']
  t.verbose = true
end

desc "Run tests"
task :default => :test

model_tokenizer-1.0.2/lib/model_tokenizer/version.rb

module ModelTokenizer
  VERSION = "1.0.2"
end

model_tokenizer-1.0.2/lib/model_tokenizer/base.rb

module ModelTokenizer
  module Base
    CHARSET = %w{ a b c d e f g h i j k m n o p q r s t u v w x y z
                  A B C D E F G H J K L M N P R S T W X Y Z
                  2 3 4 5 6 7 8 9 - _ }

    # Default length is 14 characters, which allows 56**14 (roughly 3 * 10**24) possible tokens.
    # Note: this is a class variable on the Base module, so the length set by the most recent
    # has_token call is shared by every model that extends ModelTokenizer.
    @@model_tokenizer_token_length = 14

    def model_tokenizer_token_length
      @@model_tokenizer_token_length
    end

    # Accepts an optional :length option; it must be an Integer of at least 8,
    # otherwise the current default is kept.
    def has_token(*attributes)
      options = { :length => @@model_tokenizer_token_length }.merge!(attributes.last.is_a?(Hash) ? attributes.pop : {})

      if (!options[:length].is_a?(Integer) || options[:length] < 8)
        options[:length] = @@model_tokenizer_token_length
      end

      @@model_tokenizer_token_length = options[:length]

      include InstanceMethods
    end

    module InstanceMethods
      protected

      # Samples characters from CHARSET until it finds a token
      # that no existing record of this class is already using.
      def generate_token
        self.token = loop do
          random_token = (0...self.class.model_tokenizer_token_length).map { CHARSET[rand(CHARSET.size)] }.join
          break random_token unless self.class.exists?(:token => random_token)
        end
      end
    end
  end
end
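
For a quick feel for what `generate_token` produces, the sampling step can be mirrored outside ActiveRecord. This sketch is not part of the gem and deliberately omits the uniqueness check that `generate_token` performs against the database:

```ruby
CHARSET = %w{ a b c d e f g h i j k m n o p q r s t u v w x y z
              A B C D E F G H J K L M N P R S T W X Y Z
              2 3 4 5 6 7 8 9 - _ }

# Pick `length` characters uniformly at random from the 56-character set.
def sample_token(length = 14)
  (0...length).map { CHARSET[rand(CHARSET.size)] }.join
end

puts sample_token      # e.g. "k3T-9mWZa2_Rfp"
puts sample_token(16)  # e.g. "B7dY-2nJw8_KrTq5"
```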

model_tokenizer-1.0.2/lib/model_tokenizer.rb

require "model_tokenizer/version"
require "model_tokenizer/base"

module ModelTokenizer
  # Extending a model pulls in Base's class methods and registers the
  # before_create callback that assigns the token.
  def self.extended(base)
    return if base.respond_to? :model_tokenizer

    base.class_eval do
      extend Base
      before_create :generate_token
    end
  end

  def self.included(base)
    base.extend self
  end
end

model_tokenizer-1.0.2/lib/generators/model_tokenizer/templates/migration_existing.rb

class AddModelTokenizerTokenTo<%= table_name.camelize %> < ActiveRecord::Migration
  def self.up
    change_table(:<%= table_name %>) do |t|
<% attributes.each do |attribute| -%>
      t.<%= attribute.type %> :<%= attribute.name %>
<% end -%>
<%= migration_data -%>
    end

    add_index :<%= table_name %>, :token, unique: true
  end

  def self.down
    # Implement rollback yourself.
    raise ActiveRecord::IrreversibleMigration
  end
end

model_tokenizer-1.0.2/lib/generators/model_tokenizer/templates/migration.rb

class ModelTokenizerCreate<%= table_name.camelize %> < ActiveRecord::Migration
  def change
    create_table(:<%= table_name %>) do |t|
<% attributes.each do |attribute| -%>
      t.<%= attribute.type %> :<%= attribute.name %>
<% end -%>
<%= migration_data -%>

      t.timestamps
    end

    add_index :<%= table_name %>, :token, unique: true
  end
end

model_tokenizer-1.0.2/lib/generators/model_tokenizer/model_tokenizer_generator.rb

require 'rails/generators/active_record'
require 'rails/generators/migration'

module ModelTokenizer
  module Generators
    class ModelTokenizerGenerator < ActiveRecord::Generators::Base
      include Rails::Generators::Migration

      argument :attributes, type: :array, default: [], banner: "field:type field:type"

      namespace "model_tokenizer"
      source_root File.expand_path("../templates", __FILE__)

      desc "Creates a model with the NAME argument. "\
           "If the model already exists, the appropriate code will be appended instead. "\
           "In either case, the appropriate migration will be created."

      def create_migration_file
        if (behavior == :invoke && model_exists?) ||
           (behavior == :revoke && migration_exists?(table_name))
          migration_template "migration_existing.rb", "db/migrate/add_model_tokenizer_token_to_#{table_name}.rb"
        else
          migration_template "migration.rb", "db/migrate/model_tokenizer_create_#{table_name}.rb"
        end
      end

      def generate_model
        invoke "active_record:model", [name], migration: false unless model_exists? && behavior == :invoke
      end

      def inject_model_tokenizer_content
        content = model_contents

        class_path = if namespaced?
                       class_name.to_s.split("::")
                     else
                       [class_name]
                     end

        indent_depth = class_path.size - 1
        content = content.split("\n").map { |line| " " * indent_depth + line }
                         .join("\n") << "\n"

        inject_into_class(model_path, class_path.last, content) if model_exists?
      end

      private

      def migration_data
        <<RUBY
      t.string :token, :null => false, :default => ""
RUBY
      end

      def model_contents
        <<-CONTENT
  extend ModelTokenizer
  has_token #:length => 14
  self.primary_key = :token
        CONTENT
      end

      def model_exists?
        File.exists?(File.join(destination_root, model_path))
      end

      def migration_path
        @migration_path ||= File.join("db", "migrate")
      end

      def model_path
        @model_path ||= File.join("app", "models", "#{file_path}.rb")
      end

      def migration_exists?(table_name)
        Dir.glob("#{File.join(destination_root, migration_path)}/[0-9]*_*.rb").grep(/\d+_add_model_tokenizer_token_to_#{table_name}.rb$/).first
      end
    end
  end
end
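
To make the generator and templates above concrete: for a brand-new model, an invocation such as `rails g model_tokenizer Video title:string` (the `Video` model and `title` field are hypothetical, chosen only for illustration) would render migration.rb roughly as follows, with the `token` column coming from `migration_data`:

```ruby
class ModelTokenizerCreateVideos < ActiveRecord::Migration
  def change
    create_table(:videos) do |t|
      t.string :title
      t.string :token, :null => false, :default => ""

      t.timestamps
    end

    add_index :videos, :token, unique: true
  end
end
```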

model_tokenizer-1.0.2/Gemfile

source 'https://rubygems.org'

# Specify your gem's dependencies in model_tokenizer.gemspec
gemspec

group :test do
  gem "bundler", "~> 1.7"
  gem "rake", ">= 12.3.3"
  gem "sqlite3", "< 1.4"
  gem "minitest"
  gem "coveralls"
end

model_tokenizer-1.0.2/.coveralls.yml

service_name: travis-ci

model_tokenizer-1.0.2/LICENSE.txt

Copyright (c) 2014 Adib Saad

MIT License

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.