activerecord-import-0.28.2/0000755000004100000410000000000013424140671015617 5ustar www-datawww-dataactiverecord-import-0.28.2/.travis.yml0000644000004100000410000000350413424140671017732 0ustar www-datawww-datalanguage: ruby cache: bundler rvm: - 2.3.7 env: global: # https://github.com/discourse/discourse/blob/master/.travis.yml - RUBY_GC_MALLOC_LIMIT=50000000 matrix: - AR_VERSION=3.2 - AR_VERSION=4.0 - AR_VERSION=4.1 - AR_VERSION=4.2 - AR_VERSION=5.0 - AR_VERSION=5.1 - AR_VERSION=5.2 matrix: include: - rvm: jruby-9.1.14.0 env: AR_VERSION=4.2 script: - bundle exec rake test:jdbcsqlite3 - bundle exec rake test:jdbcmysql - bundle exec rake test:jdbcpostgresql fast_finish: true addons: postgresql: "9.5" apt: sources: - travis-ci/sqlite3 - mysql-5.7-trusty packages: - sqlite3 - mysql-server - mysql-client - postgresql-9.5-postgis-2.3 before_install: - gem update --system - sudo mysql -e "use mysql; update user set authentication_string=PASSWORD('') where User='root'; update user set plugin='mysql_native_password';FLUSH PRIVILEGES;" - sudo mysql_upgrade - sudo service mysql restart before_script: - mysql -e 'create database activerecord_import_test;' - psql -c 'create database activerecord_import_test;' -U postgres - psql activerecord_import_test -c 'create extension if not exists hstore;' -U postgres - psql -c 'create extension if not exists postgis;' -U postgres - psql -c 'create extension if not exists "uuid-ossp";' -U postgres - cp test/travis/database.yml test/database.yml script: - bundle exec rake test:mysql2 - bundle exec rake test:mysql2_makara - bundle exec rake test:mysql2spatial - bundle exec rake test:postgis - bundle exec rake test:postgresql - bundle exec rake test:postgresql_makara - bundle exec rake test:seamless_database_pool - bundle exec rake test:spatialite - bundle exec rake test:sqlite3 - bundle exec rubocop dist: trusty sudo: required activerecord-import-0.28.2/test/0000755000004100000410000000000013424140671016576 5ustar www-datawww-dataactiverecord-import-0.28.2/test/database.yml.sample0000644000004100000410000000135613424140671022352 0ustar www-datawww-datacommon: &common username: root password: encoding: utf8 host: localhost database: activerecord_import_test mysql2: &mysql2 <<: *common adapter: mysql2 mysql2spatial: <<: *mysql2 mysql2_makara: <<: *mysql2 postgresql: &postgresql <<: *common username: postgres adapter: postgresql min_messages: warning postgresql_makara: <<: *postgresql postgis: <<: *postgresql oracle: <<: *common adapter: oracle min_messages: debug seamless_database_pool: <<: *common adapter: seamless_database_pool prepared_statements: false pool_adapter: mysql2 master: host: localhost sqlite: adapter: sqlite dbfile: test.db sqlite3: &sqlite3 adapter: sqlite3 database: test.db spatialite: <<: *sqlite3 activerecord-import-0.28.2/test/mysql2_makara/0000755000004100000410000000000013424140671021341 5ustar www-datawww-dataactiverecord-import-0.28.2/test/mysql2_makara/import_test.rb0000644000004100000410000000042413424140671024237 0ustar www-datawww-datarequire File.expand_path(File.dirname(__FILE__) + '/../test_helper') require File.expand_path(File.dirname(__FILE__) + '/../support/assertions') require File.expand_path(File.dirname(__FILE__) + '/../support/mysql/import_examples') should_support_mysql_import_functionality activerecord-import-0.28.2/test/adapters/0000755000004100000410000000000013424140671020401 5ustar www-datawww-dataactiverecord-import-0.28.2/test/adapters/mysql2_makara.rb0000644000004100000410000000004013424140671023463 0ustar 
www-datawww-dataENV["ARE_DB"] = "mysql2_makara" activerecord-import-0.28.2/test/adapters/postgresql_makara.rb0000644000004100000410000000003513424140671024443 0ustar www-datawww-dataENV["ARE_DB"] = "postgresql" activerecord-import-0.28.2/test/adapters/postgis.rb0000644000004100000410000000003213424140671022411 0ustar www-datawww-dataENV["ARE_DB"] = "postgis" activerecord-import-0.28.2/test/adapters/jdbcmysql.rb0000644000004100000410000000003413424140671022713 0ustar www-datawww-dataENV["ARE_DB"] = "jdbcmysql" activerecord-import-0.28.2/test/adapters/seamless_database_pool.rb0000644000004100000410000000005113424140671025413 0ustar www-datawww-dataENV["ARE_DB"] = "seamless_database_pool" activerecord-import-0.28.2/test/adapters/makara_postgis.rb0000644000004100000410000000003213424140671023725 0ustar www-datawww-dataENV["ARE_DB"] = "postgis" activerecord-import-0.28.2/test/adapters/jdbcsqlite3.rb0000644000004100000410000000003613424140671023134 0ustar www-datawww-dataENV["ARE_DB"] = "jdbcsqlite3" activerecord-import-0.28.2/test/adapters/mysql2.rb0000644000004100000410000000003113424140671022147 0ustar www-datawww-dataENV["ARE_DB"] = "mysql2" activerecord-import-0.28.2/test/adapters/postgresql.rb0000644000004100000410000000003513424140671023127 0ustar www-datawww-dataENV["ARE_DB"] = "postgresql" activerecord-import-0.28.2/test/adapters/spatialite.rb0000644000004100000410000000003513424140671023063 0ustar www-datawww-dataENV["ARE_DB"] = "spatialite" activerecord-import-0.28.2/test/adapters/sqlite3.rb0000644000004100000410000000003213424140671022305 0ustar www-datawww-dataENV["ARE_DB"] = "sqlite3" activerecord-import-0.28.2/test/adapters/mysql2spatial.rb0000644000004100000410000000004013424140671023525 0ustar www-datawww-dataENV["ARE_DB"] = "mysql2spatial" activerecord-import-0.28.2/test/adapters/jdbcpostgresql.rb0000644000004100000410000000004113424140671023747 0ustar www-datawww-dataENV["ARE_DB"] = "jdbcpostgresql" activerecord-import-0.28.2/test/schema/0000755000004100000410000000000013424140671020036 5ustar www-datawww-dataactiverecord-import-0.28.2/test/schema/jdbcpostgresql_schema.rb0000644000004100000410000000011013424140671024721 0ustar www-datawww-datarequire File.expand_path(File.dirname(__FILE__) + '/postgresql_schema') activerecord-import-0.28.2/test/schema/postgis_schema.rb0000644000004100000410000000011013424140671023363 0ustar www-datawww-datarequire File.expand_path(File.dirname(__FILE__) + '/postgresql_schema') activerecord-import-0.28.2/test/schema/version.rb0000644000004100000410000000042113424140671022045 0ustar www-datawww-dataclass SchemaInfo < ActiveRecord::Base if respond_to?(:table_name=) self.table_name = 'schema_info' else # this is becoming deprecated in ActiveRecord but not all adapters supported it # at this time set_table_name 'schema_info' end VERSION = 12 end activerecord-import-0.28.2/test/schema/sqlite3_schema.rb0000644000004100000410000000065113424140671023271 0ustar www-datawww-dataActiveRecord::Schema.define do create_table :alarms, force: true do |t| t.column :device_id, :integer, null: false t.column :alarm_type, :integer, null: false t.column :status, :integer, null: false t.column :metadata, :text t.column :secret_key, :binary t.datetime :created_at t.datetime :updated_at end add_index :alarms, [:device_id, :alarm_type], unique: true, where: 'status <> 0' end activerecord-import-0.28.2/test/schema/postgresql_schema.rb0000644000004100000410000000213213424140671024104 0ustar www-datawww-dataActiveRecord::Schema.define do execute('CREATE extension IF NOT 
EXISTS "hstore";') execute('CREATE extension IF NOT EXISTS "pgcrypto";') execute('CREATE extension IF NOT EXISTS "uuid-ossp";') create_table :vendors, id: :uuid, force: :cascade do |t| t.string :name, null: true t.text :preferences if t.respond_to?(:json) t.json :data else t.text :data end if t.respond_to?(:hstore) t.hstore :config else t.text :config end if t.respond_to?(:jsonb) t.jsonb :settings t.jsonb :json_data, null: false, default: {} else t.text :settings t.text :json_data end t.datetime :created_at t.datetime :updated_at end create_table :alarms, force: true do |t| t.column :device_id, :integer, null: false t.column :alarm_type, :integer, null: false t.column :status, :integer, null: false t.column :metadata, :text t.column :secret_key, :binary t.datetime :created_at t.datetime :updated_at end add_index :alarms, [:device_id, :alarm_type], unique: true, where: 'status <> 0' end activerecord-import-0.28.2/test/schema/generic_schema.rb0000644000004100000410000001143513424140671023323 0ustar www-datawww-dataActiveRecord::Schema.define do create_table :schema_info, force: :cascade do |t| t.integer :version, unique: true end SchemaInfo.create version: SchemaInfo::VERSION create_table :group, force: :cascade do |t| t.string :order t.timestamps null: true end create_table :topics, force: :cascade do |t| t.string :title, null: false t.string :author_name t.string :author_email_address t.datetime :written_on t.time :bonus_time t.datetime :last_read t.text :content t.boolean :approved, default: '1' t.integer :replies_count t.integer :parent_id t.string :type t.datetime :created_at t.datetime :created_on t.datetime :updated_at t.datetime :updated_on end create_table :projects, force: :cascade do |t| t.string :name t.string :type end create_table :developers, force: :cascade do |t| t.string :name t.integer :salary, default: '70000' t.datetime :created_at t.integer :team_id t.datetime :updated_at end create_table :addresses, force: :cascade do |t| t.string :address t.string :city t.string :state t.string :zip t.integer :developer_id end create_table :teams, force: :cascade do |t| t.string :name end create_table :books, force: :cascade do |t| t.string :title, null: false t.string :publisher, null: false, default: 'Default Publisher' t.string :author_name, null: false t.datetime :created_at t.datetime :created_on t.datetime :updated_at t.datetime :updated_on t.date :publish_date t.integer :topic_id t.integer :tag_id t.integer :publisher_id t.boolean :for_sale, default: true t.integer :status, default: 0 t.string :type end create_table :chapters, force: :cascade do |t| t.string :title t.integer :book_id, null: false t.datetime :created_at t.datetime :updated_at end create_table :end_notes, primary_key: :end_note_id, force: :cascade do |t| t.string :note t.integer :book_id, null: false t.datetime :created_at t.datetime :updated_at end create_table :languages, force: :cascade do |t| t.string :name t.integer :developer_id end create_table :shopping_carts, force: :cascade do |t| t.string :name, null: true t.datetime :created_at t.datetime :updated_at end create_table :cart_items, force: :cascade do |t| t.string :shopping_cart_id, null: false t.string :book_id, null: false t.integer :copies, default: 1 t.datetime :created_at t.datetime :updated_at end add_index :cart_items, [:shopping_cart_id, :book_id], unique: true, name: 'uk_shopping_cart_books' create_table :animals, force: :cascade do |t| t.string :name, null: false t.string :size, default: nil t.datetime :created_at t.datetime :updated_at end 
add_index :animals, [:name], unique: true, name: 'uk_animals' create_table :widgets, id: false, force: :cascade do |t| t.integer :w_id, primary_key: true t.boolean :active, default: false t.text :data t.text :json_data t.text :unspecified_data t.text :custom_data end create_table :promotions, primary_key: :promotion_id, force: :cascade do |t| t.string :code t.string :description t.decimal :discount end add_index :promotions, [:code], unique: true, name: 'uk_code' create_table :discounts, force: :cascade do |t| t.decimal :amount t.integer :discountable_id t.string :discountable_type end create_table :rules, id: false, force: :cascade do |t| t.integer :id t.string :condition_text t.integer :question_id end create_table :questions, force: :cascade do |t| t.string :body end create_table :vendors, force: :cascade do |t| t.string :name, null: true t.text :preferences t.text :data t.text :config t.text :settings end create_table :cars, id: false, force: :cascade do |t| t.string :Name, null: true t.string :Features end create_table :users, force: :cascade do |t| t.string :name, null: false t.integer :lock_version, null: false, default: 0 end create_table :user_tokens, force: :cascade do |t| t.string :user_name, null: false t.string :token, null: false end create_table :accounts, force: :cascade do |t| t.string :name, null: false t.integer :lock, null: false, default: 0 end create_table :bike_makers, force: :cascade do |t| t.string :name, null: false t.integer :lock_version, null: false, default: 0 end add_index :cars, :Name, unique: true unless ENV["SKIP_COMPOSITE_PK"] execute %( CREATE TABLE IF NOT EXISTS tags ( tag_id INT NOT NULL, publisher_id INT NOT NULL, tag VARCHAR(50), PRIMARY KEY (tag_id, publisher_id) ); ).split.join(' ').strip end end activerecord-import-0.28.2/test/schema/mysql2_schema.rb0000644000004100000410000000115013424140671023127 0ustar www-datawww-dataActiveRecord::Schema.define do create_table :books, force: :cascade do |t| t.string :title, null: false t.virtual :upper_title, type: :string, as: "upper(`title`)" if t.respond_to?(:virtual) t.string :publisher, null: false, default: 'Default Publisher' t.string :author_name, null: false t.datetime :created_at t.datetime :created_on t.datetime :updated_at t.datetime :updated_on t.date :publish_date t.integer :topic_id t.integer :tag_id t.integer :publisher_id t.boolean :for_sale, default: true t.integer :status, default: 0 t.string :type end end activerecord-import-0.28.2/test/synchronize_test.rb0000644000004100000410000000352713424140671022544 0ustar www-datawww-datarequire File.expand_path('../test_helper', __FILE__) describe ".synchronize" do let(:topics) { Generate(3, :topics) } let(:titles) { %w(one two three) } setup do # update records without ActiveRecord knowing about it Topic.connection.execute( "UPDATE #{Topic.table_name} SET title='#{titles[0]}_haha' WHERE id=#{topics[0].id}", "Updating record 1 without ActiveRecord" ) Topic.connection.execute( "UPDATE #{Topic.table_name} SET title='#{titles[1]}_haha' WHERE id=#{topics[1].id}", "Updating record 2 without ActiveRecord" ) Topic.connection.execute( "UPDATE #{Topic.table_name} SET title='#{titles[2]}_haha' WHERE id=#{topics[2].id}", "Updating record 3 without ActiveRecord" ) end it "reloads data for the specified records" do Topic.synchronize topics actual_titles = topics.map(&:title) assert_equal "#{titles[0]}_haha", actual_titles[0], "the first record was not correctly updated" assert_equal "#{titles[1]}_haha", actual_titles[1], "the second record was not correctly 
updated" assert_equal "#{titles[2]}_haha", actual_titles[2], "the third record was not correctly updated" end it "the synchronized records aren't dirty" do # Update the in memory records so they're dirty topics.each { |topic| topic.title = 'dirty title' } Topic.synchronize topics assert_equal false, topics[0].changed?, "the first record was dirty" assert_equal false, topics[1].changed?, "the second record was dirty" assert_equal false, topics[2].changed?, "the third record was dirty" end it "ignores default scope" do # update records outside of ActiveRecord knowing about it Topic.connection.execute( "UPDATE #{Topic.table_name} SET approved='0' WHERE id=#{topics[0].id}", "Updating record 1 without ActiveRecord" ) Topic.synchronize topics assert_equal false, topics[0].approved end end activerecord-import-0.28.2/test/mysql2/0000755000004100000410000000000013424140671020025 5ustar www-datawww-dataactiverecord-import-0.28.2/test/mysql2/import_test.rb0000644000004100000410000000042313424140671022722 0ustar www-datawww-datarequire File.expand_path(File.dirname(__FILE__) + '/../test_helper') require File.expand_path(File.dirname(__FILE__) + '/../support/assertions') require File.expand_path(File.dirname(__FILE__) + '/../support/mysql/import_examples') should_support_mysql_import_functionality activerecord-import-0.28.2/test/value_sets_records_parser_test.rb0000644000004100000410000000213313424140671025430 0ustar www-datawww-datarequire File.expand_path(File.dirname(__FILE__) + '/test_helper') require 'activerecord-import/value_sets_parser' describe "ActiveRecord::Import::ValueSetsRecordsParser" do context "#parse - computing insert value sets" do let(:parser) { ActiveRecord::Import::ValueSetsRecordsParser } let(:base_sql) { "INSERT INTO atable (a,b,c)" } let(:values) { ["(1,2,3)", "(2,3,4)", "(3,4,5)"] } context "when the max number of records is 1" do it "should return 3 value sets when given 3 values sets" do value_sets = parser.parse values, max_records: 1 assert_equal 3, value_sets.size end end context "when the max number of records is 2" do it "should return 2 value sets when given 3 values sets" do value_sets = parser.parse values, max_records: 2 assert_equal 2, value_sets.size end end context "when the max number of records is 3" do it "should return 1 value sets when given 3 values sets" do value_sets = parser.parse values, max_records: 3 assert_equal 1, value_sets.size end end end end activerecord-import-0.28.2/test/sqlite3/0000755000004100000410000000000013424140671020162 5ustar www-datawww-dataactiverecord-import-0.28.2/test/sqlite3/import_test.rb0000644000004100000410000000031313424140671023055 0ustar www-datawww-datarequire File.expand_path(File.dirname(__FILE__) + '/../test_helper') require File.expand_path(File.dirname(__FILE__) + '/../support/sqlite3/import_examples') should_support_sqlite3_import_functionality activerecord-import-0.28.2/test/models/0000755000004100000410000000000013424140671020061 5ustar www-datawww-dataactiverecord-import-0.28.2/test/models/bike_maker.rb0000644000004100000410000000015013424140671022473 0ustar www-datawww-datamodule Bike def self.table_name_prefix 'bike_' end class Maker < ActiveRecord::Base end end activerecord-import-0.28.2/test/models/car.rb0000644000004100000410000000007613424140671021156 0ustar www-datawww-dataclass Car < ActiveRecord::Base self.primary_key = :Name end activerecord-import-0.28.2/test/models/alarm.rb0000644000004100000410000000004513424140671021501 0ustar www-datawww-dataclass Alarm < ActiveRecord::Base end 
activerecord-import-0.28.2/test/models/vendor.rb0000644000004100000410000000031013424140671021675 0ustar www-datawww-dataclass Vendor < ActiveRecord::Base store :preferences, accessors: [:color], coder: JSON store_accessor :data, :size store_accessor :config, :contact store_accessor :settings, :charge_code end activerecord-import-0.28.2/test/models/group.rb0000644000004100000410000000010113424140671021532 0ustar www-datawww-dataclass Group < ActiveRecord::Base self.table_name = 'group' end activerecord-import-0.28.2/test/models/account.rb0000644000004100000410000000010513424140671022036 0ustar www-datawww-dataclass Account < ActiveRecord::Base self.locking_column = :lock end activerecord-import-0.28.2/test/models/tag.rb0000644000004100000410000000022413424140671021157 0ustar www-datawww-dataclass Tag < ActiveRecord::Base self.primary_keys = :tag_id, :publisher_id unless ENV["SKIP_COMPOSITE_PK"] has_many :books, inverse_of: :tag end activerecord-import-0.28.2/test/models/user.rb0000644000004100000410000000015113424140671021361 0ustar www-datawww-dataclass User < ActiveRecord::Base has_many :user_tokens, primary_key: :name, foreign_key: :user_name end activerecord-import-0.28.2/test/models/promotion.rb0000644000004100000410000000011413424140671022430 0ustar www-datawww-dataclass Promotion < ActiveRecord::Base self.primary_key = :promotion_id end activerecord-import-0.28.2/test/models/rule.rb0000644000004100000410000000007313424140671021355 0ustar www-datawww-dataclass Rule < ActiveRecord::Base belongs_to :question end activerecord-import-0.28.2/test/models/chapter.rb0000644000004100000410000000016413424140671022035 0ustar www-datawww-dataclass Chapter < ActiveRecord::Base belongs_to :book, inverse_of: :chapters validates :title, presence: true end activerecord-import-0.28.2/test/models/user_token.rb0000644000004100000410000000021313424140671022560 0ustar www-datawww-dataclass UserToken < ActiveRecord::Base belongs_to :user, primary_key: :name, foreign_key: :user_name validates :user, presence: true end activerecord-import-0.28.2/test/models/topic.rb0000644000004100000410000000145013424140671021524 0ustar www-datawww-dataclass Topic < ActiveRecord::Base validates_presence_of :author_name validates :title, numericality: { only_integer: true }, on: :context_test validates :title, uniqueness: true validates :content, uniqueness: true validates :word_count, numericality: { greater_than: 0 }, if: :content? 
validate -> { errors.add(:title, :validate_failed) if title == 'validate_failed' } before_validation -> { errors.add(:title, :invalid) if title == 'invalid' } has_many :books, inverse_of: :topic belongs_to :parent, class_name: "Topic" composed_of :description, mapping: [%w(title title), %w(author_name author_name)], allow_nil: true, class_name: "TopicDescription" default_scope { where(approved: true) } private def word_count @word_count ||= content.to_s.scan(/\w+/).count end end activerecord-import-0.28.2/test/models/discount.rb0000644000004100000410000000012613424140671022235 0ustar www-datawww-dataclass Discount < ActiveRecord::Base belongs_to :discountable, polymorphic: true end activerecord-import-0.28.2/test/models/book.rb0000644000004100000410000000050213424140671021335 0ustar www-datawww-dataclass Book < ActiveRecord::Base belongs_to :topic, inverse_of: :books belongs_to :tag, foreign_key: [:tag_id, :parent_id] has_many :chapters, inverse_of: :book has_many :discounts, as: :discountable has_many :end_notes, inverse_of: :book enum status: [:draft, :published] if ENV['AR_VERSION'].to_f >= 4.1 end activerecord-import-0.28.2/test/models/dictionary.rb0000644000004100000410000000006513424140671022554 0ustar www-datawww-datarequire_relative 'book' class Dictionary < Book end activerecord-import-0.28.2/test/models/end_note.rb0000644000004100000410000000016413424140671022202 0ustar www-datawww-dataclass EndNote < ActiveRecord::Base belongs_to :book, inverse_of: :end_notes validates :note, presence: true end activerecord-import-0.28.2/test/models/widget.rb0000644000004100000410000000061313424140671021671 0ustar www-datawww-dataclass CustomCoder def load(value) if value.nil? {} else YAML.load(value) end end def dump(value) YAML.dump(value) end end class Widget < ActiveRecord::Base self.primary_key = :w_id default_scope -> { where(active: true) } serialize :data, Hash serialize :json_data, JSON serialize :unspecified_data serialize :custom_data, CustomCoder.new end activerecord-import-0.28.2/test/models/question.rb0000644000004100000410000000007013424140671022252 0ustar www-datawww-dataclass Question < ActiveRecord::Base has_one :rule end activerecord-import-0.28.2/test/makara_postgis/0000755000004100000410000000000013424140671021602 5ustar www-datawww-dataactiverecord-import-0.28.2/test/makara_postgis/import_test.rb0000644000004100000410000000051213424140671024476 0ustar www-datawww-datarequire File.expand_path(File.dirname(__FILE__) + '/../test_helper') require File.expand_path(File.dirname(__FILE__) + '/../support/postgresql/import_examples') should_support_postgresql_import_functionality if ActiveRecord::Base.connection.supports_on_duplicate_key_update? should_support_postgresql_upsert_functionality end activerecord-import-0.28.2/test/import_test.rb0000644000004100000410000007566213424140671021514 0ustar www-datawww-datarequire File.expand_path('../test_helper', __FILE__) describe "#import" do it "should return the number of inserts performed" do # see ActiveRecord::ConnectionAdapters::AbstractAdapter test for more specifics assert_difference "Topic.count", +10 do result = Topic.import Build(3, :topics) assert result.num_inserts > 0 result = Topic.import Build(7, :topics) assert result.num_inserts > 0 end end it "warns you that you're using the library wrong" do error = assert_raise(ArgumentError) { Topic.import %w(title author_name), ['Author #1', 'Book #1', 0] } assert_equal error.message, "Last argument should be a two dimensional array '[[]]'. 
First element in array was a String" end it "should not produce an error when importing empty arrays" do assert_nothing_raised do Topic.import [] Topic.import %w(title author_name), [] end end describe "argument safety" do it "should not modify the passed in columns array" do assert_nothing_raised do columns = %w(title author_name).freeze Topic.import columns, [%w(foo bar)] end end it "should not modify the passed in values array" do assert_nothing_raised do record = %w(foo bar).freeze values = [record].freeze Topic.import %w(title author_name), values end end end describe "with non-default ActiveRecord models" do context "that have a non-standard primary key (that is no sequence)" do it "should import models successfully" do assert_difference "Widget.count", +3 do Widget.import Build(3, :widgets) end end context "with uppercase letters" do it "should import models successfully" do assert_difference "Car.count", +3 do Car.import Build(3, :cars) end end end end context "that have no primary key" do it "should import models successfully" do assert_difference "Rule.count", +3 do Rule.import Build(3, :rules) end end end end describe "with an array of hashes" do let(:columns) { [:title, :author_name] } let(:values) { [{ title: "LDAP", author_name: "Jerry Carter", author_email_address: "jcarter@test.com" }, { title: "Rails Recipes", author_name: "Chad Fowler", author_email_address: "cfowler@test.com" }] } it "should import hash data successfully" do assert_difference "Topic.count", +2 do Topic.import values, validate: false end end it "should import specified hash data successfully" do assert_difference "Topic.count", +2 do Topic.import columns, values, validate: false end Topic.all.each do |t| assert_nil t.author_email_address end end context "with extra keys" do let(:values) do [ { title: "LDAP", author_name: "Jerry Carter" }, { title: "Rails Recipes", author_name: "Chad Fowler", author_email_address: "cfowler@test.com" } # author_email_address is unknown ] end it "should fail when column names are not specified" do err = assert_raises ArgumentError do Topic.import values, validate: false end assert err.message.include? 'Extra keys: [:author_email_address]' end it "should succeed when column names are specified" do assert_difference "Topic.count", +2 do Topic.import columns, values, validate: false end end end context "with missing keys" do let(:values) do [ { title: "LDAP", author_name: "Jerry Carter" }, { title: "Rails Recipes" } # author_name is missing ] end it "should fail when column names are not specified" do err = assert_raises ArgumentError do Topic.import values, validate: false end assert err.message.include? 'Missing keys: [:author_name]' end it "should fail on missing hash key from specified column names" do err = assert_raises ArgumentError do Topic.import %i(author_name), values, validate: false end assert err.message.include? 
'Missing keys: [:author_name]' end end end unless ENV["SKIP_COMPOSITE_PK"] describe "with composite primary keys" do it "should import models successfully" do tags = [Tag.new(tag_id: 1, publisher_id: 1, tag: 'Mystery')] assert_difference "Tag.count", +1 do Tag.import tags end end it "should import array of values successfully" do columns = [:tag_id, :publisher_id, :tag] values = [[1, 1, 'Mystery'], [2, 1, 'Science']] assert_difference "Tag.count", +2 do Tag.import columns, values, validate: false end end end end describe "with STI models" do it "should import models successfully" do dictionaries = [Dictionary.new(author_name: "Noah Webster", title: "Webster's Dictionary")] assert_difference "Dictionary.count", +1 do Dictionary.import dictionaries end assert_equal "Dictionary", Dictionary.first.type end end context "with :validation option" do let(:columns) { %w(title author_name content) } let(:valid_values) { [["LDAP", "Jerry Carter", "Putting Directories to Work."], ["Rails Recipes", "Chad Fowler", "A trusted collection of solutions."]] } let(:valid_values_with_context) { [[1111, "Jerry Carter", "1111"], [2222, "Chad Fowler", "2222"]] } let(:invalid_values) { [["The RSpec Book", "David Chelimsky", "..."], ["Agile+UX", "", "All about Agile in UX."]] } let(:valid_models) { valid_values.map { |title, author_name, content| Topic.new(title: title, author_name: author_name, content: content) } } let(:invalid_models) { invalid_values.map { |title, author_name, content| Topic.new(title: title, author_name: author_name, content: content) } } context "with validation checks turned off" do it "should import valid data" do assert_difference "Topic.count", +2 do Topic.import columns, valid_values, validate: false end end it "should import invalid data" do assert_difference "Topic.count", +2 do Topic.import columns, invalid_values, validate: false end end it 'should raise a specific error if a column does not exist' do assert_raises ActiveRecord::Import::MissingColumnError do Topic.import ['foo'], [['bar']], validate: false end end end context "with validation checks turned on" do it "should import valid data" do assert_difference "Topic.count", +2 do Topic.import columns, valid_values, validate: true end end it "should import valid data with on option" do assert_difference "Topic.count", +2 do Topic.import columns, valid_values_with_context, validate_with_context: :context_test end end it "should ignore uniqueness validators" do Topic.import columns, valid_values assert_difference "Topic.count", +2 do Topic.import columns, valid_values end end it "should not alter the callback chain of the model" do attributes = columns.zip(valid_values.first).to_h topic = Topic.new attributes Topic.import [topic], validate: true duplicate_topic = Topic.new attributes Topic.import [duplicate_topic], validate: true assert duplicate_topic.invalid? 
end it "should not import invalid data" do assert_no_difference "Topic.count" do Topic.import columns, invalid_values, validate: true end end it "should import invalid data with on option" do assert_no_difference "Topic.count" do Topic.import columns, valid_values, validate_with_context: :context_test end end it "should report the failed instances" do results = Topic.import columns, invalid_values, validate: true assert_equal invalid_values.size, results.failed_instances.size assert_not_equal results.failed_instances.first, results.failed_instances.last results.failed_instances.each do |e| assert_kind_of Topic, e assert_equal e.errors.count, 1 end end it "should set ids in valid models if adapter supports setting primary key of imported objects" do if ActiveRecord::Base.supports_setting_primary_key_of_imported_objects? Topic.import (invalid_models + valid_models), validate: true assert_nil invalid_models[0].id assert_nil invalid_models[1].id assert_equal valid_models[0].id, Topic.all[0].id assert_equal valid_models[1].id, Topic.all[1].id end end it "should set ActiveRecord timestamps in valid models if adapter supports setting primary key of imported objects" do if ActiveRecord::Base.supports_setting_primary_key_of_imported_objects? Timecop.freeze(Time.at(0)) do Topic.import (invalid_models + valid_models), validate: true end assert_nil invalid_models[0].created_at assert_nil invalid_models[0].updated_at assert_nil invalid_models[1].created_at assert_nil invalid_models[1].updated_at assert_equal valid_models[0].created_at, Topic.all[0].created_at assert_equal valid_models[0].updated_at, Topic.all[0].updated_at assert_equal valid_models[1].created_at, Topic.all[1].created_at assert_equal valid_models[1].updated_at, Topic.all[1].updated_at end end it "should import valid data when mixed with invalid data" do assert_difference "Topic.count", +2 do Topic.import columns, valid_values + invalid_values, validate: true end assert_equal 0, Topic.where(title: invalid_values.map(&:first)).count end it "should run callbacks" do assert_no_difference "Topic.count" do Topic.import columns, [["invalid", "Jerry Carter"]], validate: true end end it "should call validation methods" do assert_no_difference "Topic.count" do Topic.import columns, [["validate_failed", "Jerry Carter"]], validate: true end end end context "with uniqueness validators included" do it "should not import duplicate records" do Topic.import columns, valid_values assert_no_difference "Topic.count" do Topic.import columns, valid_values, validate_uniqueness: true end end end context "when validatoring presence of belongs_to association" do it "should not import records without foreign key" do assert_no_difference "UserToken.count" do UserToken.import [:token], [['12345abcdef67890']] end end it "should import records with foreign key" do assert_difference "UserToken.count", +1 do UserToken.import [:user_name, :token], [%w("Bob", "12345abcdef67890")] end end it "should not mutate the defined validations" do UserToken.import [:user_name, :token], [%w("Bob", "12345abcdef67890")] ut = UserToken.new ut.valid? 
assert_includes ut.errors.messages, :user end end end context "without :validation option" do let(:columns) { %w(title author_name) } let(:invalid_values) { [["The RSpec Book", ""], ["Agile+UX", ""]] } it "should not import invalid data" do assert_no_difference "Topic.count" do result = Topic.import columns, invalid_values assert_equal 2, result.failed_instances.size end end end context "with :all_or_none option" do let(:columns) { %w(title author_name) } let(:valid_values) { [["LDAP", "Jerry Carter"], ["Rails Recipes", "Chad Fowler"]] } let(:invalid_values) { [["The RSpec Book", ""], ["Agile+UX", ""]] } let(:mixed_values) { valid_values + invalid_values } context "with validation checks turned on" do it "should import valid data" do assert_difference "Topic.count", +2 do Topic.import columns, valid_values, all_or_none: true end end it "should not import invalid data" do assert_no_difference "Topic.count" do Topic.import columns, invalid_values, all_or_none: true end end it "should not import valid data when mixed with invalid data" do assert_no_difference "Topic.count" do Topic.import columns, mixed_values, all_or_none: true end end it "should report the failed instances" do results = Topic.import columns, mixed_values, all_or_none: true assert_equal invalid_values.size, results.failed_instances.size results.failed_instances.each { |e| assert_kind_of Topic, e } end it "should report the zero inserts" do results = Topic.import columns, mixed_values, all_or_none: true assert_equal 0, results.num_inserts end end end context "with :batch_size option" do it "should import with a single insert" do assert_difference "Topic.count", +10 do result = Topic.import Build(10, :topics), batch_size: 10 assert_equal 1, result.num_inserts if Topic.supports_import? end end it "should import with multiple inserts" do assert_difference "Topic.count", +10 do result = Topic.import Build(10, :topics), batch_size: 4 assert_equal 3, result.num_inserts if Topic.supports_import? end end end context "with :synchronize option" do context "synchronizing on new records" do let(:new_topics) { Build(3, :topics) } it "doesn't reload any data (doesn't work)" do Topic.import new_topics, synchronize: new_topics if Topic.supports_setting_primary_key_of_imported_objects? 
assert new_topics.all?(&:persisted?), "Records should have been reloaded" else assert new_topics.all?(&:new_record?), "No record should have been reloaded" end end end context "synchronizing on new records with explicit conditions" do let(:new_topics) { Build(3, :topics) } it "reloads data for existing in-memory instances" do Topic.import(new_topics, synchronize: new_topics, synchronize_keys: [:title] ) assert new_topics.all?(&:persisted?), "Records should have been reloaded" end end context "synchronizing on destroyed records with explicit conditions" do let(:new_topics) { Generate(3, :topics) } it "reloads data for existing in-memory instances" do new_topics.each(&:destroy) Topic.import(new_topics, synchronize: new_topics, synchronize_keys: [:title] ) assert new_topics.all?(&:persisted?), "Records should have been reloaded" end end end context "with an array of unsaved model instances" do let(:topic) { Build(:topic, title: "The RSpec Book", author_name: "David Chelimsky") } let(:topics) { Build(9, :topics) } let(:invalid_topics) { Build(7, :invalid_topics) } it "should import records based on those model's attributes" do assert_difference "Topic.count", +9 do Topic.import topics end Topic.import [topic] assert Topic.where(title: "The RSpec Book", author_name: "David Chelimsky").first end it "should not overwrite existing records" do topic = Generate(:topic, title: "foobar") assert_no_difference "Topic.count" do begin Topic.transaction do topic.title = "baz" Topic.import [topic] end rescue Exception # PostgreSQL raises PgError due to key constraints # I don't know why ActiveRecord doesn't catch these. *sigh* end end assert_equal "foobar", topic.reload.title end context "with validation checks turned on" do it "should import valid models" do assert_difference "Topic.count", +9 do Topic.import topics, validate: true end end it "should not import invalid models" do assert_no_difference "Topic.count" do Topic.import invalid_topics, validate: true end end end context "with validation checks turned off" do it "should import invalid models" do assert_difference "Topic.count", +7 do Topic.import invalid_topics, validate: false end end end end context "with an array of columns and an array of unsaved model instances" do let(:topics) { Build(2, :topics) } it "should import records populating the supplied columns with the corresponding model instance attributes" do assert_difference "Topic.count", +2 do Topic.import [:author_name, :title], topics end # imported topics should be findable by their imported attributes assert Topic.where(author_name: topics.first.author_name).first assert Topic.where(author_name: topics.last.author_name).first end it "should not populate fields for columns not imported" do topics.first.author_email_address = "zach.dennis@gmail.com" assert_difference "Topic.count", +2 do Topic.import [:author_name, :title], topics end assert !Topic.where(author_email_address: "zach.dennis@gmail.com").first end end context "with an array of columns and an array of values" do it "should import ids when specified" do Topic.import [:id, :author_name, :title], [[99, "Bob Jones", "Topic 99"]] assert_equal 99, Topic.last.id end it "ignores the recursive option" do assert_difference "Topic.count", +1 do Topic.import [:author_name, :title], [["David Chelimsky", "The RSpec Book"]], recursive: true end end end context "ActiveRecord timestamps" do let(:time) { Chronic.parse("5 minutes ago") } context "when the timestamps columns are present" do setup do @existing_book = Book.create(title: "Fell", 
author_name: "Curry", publisher: "Bayer", created_at: 2.years.ago.utc, created_on: 2.years.ago.utc, updated_at: 2.years.ago.utc, updated_on: 2.years.ago.utc) ActiveRecord::Base.default_timezone = :utc Timecop.freeze(time) do assert_difference "Book.count", +2 do Book.import %w(title author_name publisher created_at created_on updated_at updated_on), [["LDAP", "Big Bird", "Del Rey", nil, nil, nil, nil], [@existing_book.title, @existing_book.author_name, @existing_book.publisher, @existing_book.created_at, @existing_book.created_on, @existing_book.updated_at, @existing_book.updated_on]] end end @new_book, @existing_book = Book.last 2 end it "should set the created_at column for new records" do assert_in_delta time.to_i, @new_book.created_at.to_i, 1.second end it "should set the created_on column for new records" do assert_in_delta time.to_i, @new_book.created_on.to_i, 1.second end it "should not set the created_at column for existing records" do assert_equal 2.years.ago.utc.strftime("%Y:%d"), @existing_book.created_at.strftime("%Y:%d") end it "should not set the created_on column for existing records" do assert_equal 2.years.ago.utc.strftime("%Y:%d"), @existing_book.created_on.strftime("%Y:%d") end it "should set the updated_at column for new records" do assert_in_delta time.to_i, @new_book.updated_at.to_i, 1.second end it "should set the updated_on column for new records" do assert_in_delta time.to_i, @new_book.updated_on.to_i, 1.second end it "should not set the updated_at column for existing records" do assert_equal 2.years.ago.utc.strftime("%Y:%d"), @existing_book.updated_at.strftime("%Y:%d") end it "should not set the updated_on column for existing records" do assert_equal 2.years.ago.utc.strftime("%Y:%d"), @existing_book.updated_on.strftime("%Y:%d") end it "should not set the updated_at column on models if changed" do timestamp = Time.now.utc books = [ Book.new(author_name: "Foo", title: "Baz", created_at: timestamp, updated_at: timestamp) ] Book.import books assert_equal timestamp.strftime("%Y:%d"), Book.last.updated_at.strftime("%Y:%d") end end context "when a custom time zone is set" do setup do Timecop.freeze(time) do assert_difference "Book.count", +1 do Book.import [:title, :author_name, :publisher], [["LDAP", "Big Bird", "Del Rey"]] end end @book = Book.last end it "should set the created_at and created_on timestamps for new records" do assert_in_delta time.to_i, @book.created_at.to_i, 1.second assert_in_delta time.to_i, @book.created_on.to_i, 1.second end it "should set the updated_at and updated_on timestamps for new records" do assert_in_delta time.to_i, @book.updated_at.to_i, 1.second assert_in_delta time.to_i, @book.updated_on.to_i, 1.second end end end context "importing with database reserved words" do let(:group) { Build(:group, order: "superx") } it "should import just fine" do assert_difference "Group.count", +1 do Group.import [group] end assert_equal "superx", Group.first.order end end context "importing a datetime field" do it "should import a date with YYYY/MM/DD format just fine" do Topic.import [:author_name, :title, :last_read], [["Bob Jones", "Topic 2", "2010/05/14"]] assert_equal "2010/05/14".to_date, Topic.last.last_read.to_date end end context "importing through an association scope" do { has_many: :chapters, polymorphic: :discounts }.each do |association_type, association| book = FactoryBot.create :book scope = book.public_send association klass = { chapters: Chapter, discounts: Discount }[association] column = { chapters: :title, discounts: :amount 
}[association] val1 = { chapters: 'A', discounts: 5 }[association] val2 = { chapters: 'B', discounts: 6 }[association] context "for #{association_type}" do it "works importing models" do scope.import [ klass.new(column => val1), klass.new(column => val2) ] assert_equal [val1, val2], scope.map(&column).sort end it "works importing array of columns and values" do scope.import [column], [[val1], [val2]] assert_equal [val1, val2], scope.map(&column).sort end it "works importing array of hashes" do scope.import [{ column => val1 }, { column => val2 }] assert_equal [val1, val2], scope.map(&column).sort end end it "works with a non-standard association primary key" do user = User.create(id: 1, name: 'Solomon') user.user_tokens.import [:id, :token], [[5, '12345abcdef67890']] token = UserToken.find(5) assert_equal 'Solomon', token.user_name end end end context "importing model with polymorphic belongs_to" do it "works without error" do book = FactoryBot.create :book discount = Discount.new(discountable: book) Discount.import([discount]) assert_equal 1, Discount.count end end context 'When importing models with Enum fields' do it 'should be able to import enum fields' do Book.delete_all if Book.count > 0 books = [ Book.new(author_name: "Foo", title: "Baz", status: 0), Book.new(author_name: "Foo2", title: "Baz2", status: 1), ] Book.import books assert_equal 2, Book.count if ENV['AR_VERSION'].to_i >= 5.0 assert_equal 'draft', Book.first.read_attribute('status') assert_equal 'published', Book.last.read_attribute('status') else assert_equal 0, Book.first.read_attribute('status') assert_equal 1, Book.last.read_attribute('status') end end it 'should be able to import enum fields with default value' do Book.delete_all if Book.count > 0 books = [ Book.new(author_name: "Foo", title: "Baz") ] Book.import books assert_equal 1, Book.count if ENV['AR_VERSION'].to_i >= 5.0 assert_equal 'draft', Book.first.read_attribute('status') else assert_equal 0, Book.first.read_attribute('status') end end if ENV['AR_VERSION'].to_f > 4.1 it 'should be able to import enum fields by name' do Book.delete_all if Book.count > 0 books = [ Book.new(author_name: "Foo", title: "Baz", status: :draft), Book.new(author_name: "Foo2", title: "Baz2", status: :published), ] Book.import books assert_equal 2, Book.count if ENV['AR_VERSION'].to_i >= 5.0 assert_equal 'draft', Book.first.read_attribute('status') assert_equal 'published', Book.last.read_attribute('status') else assert_equal 0, Book.first.read_attribute('status') assert_equal 1, Book.last.read_attribute('status') end end end end context 'When importing arrays of values with Enum fields' do let(:columns) { [:author_name, :title, :status] } let(:values) { [['Author #1', 'Book #1', 0], ['Author #2', 'Book #2', 1]] } it 'should be able to import enum fields' do Book.delete_all if Book.count > 0 Book.import columns, values assert_equal 2, Book.count if ENV['AR_VERSION'].to_i >= 5.0 assert_equal 'draft', Book.first.read_attribute('status') assert_equal 'published', Book.last.read_attribute('status') else assert_equal 0, Book.first.read_attribute('status') assert_equal 1, Book.last.read_attribute('status') end end end context 'importing arrays of values with boolean fields' do let(:columns) { [:author_name, :title, :for_sale] } it 'should be able to coerce integers as boolean fields' do Book.delete_all if Book.count > 0 values = [['Author #1', 'Book #1', 0], ['Author #2', 'Book #2', 1]] assert_difference "Book.count", +2 do Book.import columns, values end assert_equal false, 
Book.first.for_sale assert_equal true, Book.last.for_sale end it 'should be able to coerce strings as boolean fields' do Book.delete_all if Book.count > 0 values = [['Author #1', 'Book #1', 'false'], ['Author #2', 'Book #2', 'true']] assert_difference "Book.count", +2 do Book.import columns, values end assert_equal false, Book.first.for_sale assert_equal true, Book.last.for_sale end end describe "importing when model has default_scope" do it "doesn't import the default scope values" do assert_difference "Widget.unscoped.count", +2 do Widget.import [:w_id], [[1], [2]] end default_scope_value = Widget.scope_attributes[:active] assert_not_equal default_scope_value, Widget.unscoped.find_by_w_id(1) assert_not_equal default_scope_value, Widget.unscoped.find_by_w_id(2) end it "imports columns that are a part of the default scope using the value specified" do assert_difference "Widget.unscoped.count", +2 do Widget.import [:w_id, :active], [[1, true], [2, false]] end assert_not_equal true, Widget.unscoped.find_by_w_id(1) assert_not_equal false, Widget.unscoped.find_by_w_id(2) end end describe "importing serialized fields" do it "imports values for serialized Hash fields" do assert_difference "Widget.unscoped.count", +1 do Widget.import [:w_id, :data], [[1, { a: :b }]] end assert_equal({ a: :b }, Widget.find_by_w_id(1).data) end it "imports values for serialized fields" do assert_difference "Widget.unscoped.count", +1 do Widget.import [:w_id, :unspecified_data], [[1, { a: :b }]] end assert_equal({ a: :b }, Widget.find_by_w_id(1).unspecified_data) end it "imports values for custom coder" do assert_difference "Widget.unscoped.count", +1 do Widget.import [:w_id, :custom_data], [[1, { a: :b }]] end assert_equal({ a: :b }, Widget.find_by_w_id(1).custom_data) end let(:data) { { a: :b } } it "imports values for serialized JSON fields" do assert_difference "Widget.unscoped.count", +1 do Widget.import [:w_id, :json_data], [[9, data]] end assert_equal(data.as_json, Widget.find_by_w_id(9).json_data) end it "imports serialized values from saved records" do Widget.import [:w_id, :json_data], [[1, data]] assert_equal data.as_json, Widget.last.json_data w = Widget.last w.w_id = 2 Widget.import([w]) assert_equal data.as_json, Widget.last.json_data end context "with a store" do it "imports serialized attributes set using accessors" do vendors = [Vendor.new(name: 'Vendor 1', color: 'blue')] assert_difference "Vendor.count", +1 do Vendor.import vendors end assert_equal('blue', Vendor.first.color) end end end describe "#import!" do context "with an array of unsaved model instances" do let(:topics) { Build(2, :topics) } let(:invalid_topics) { Build(2, :invalid_topics) } context "with invalid data" do it "should raise ActiveRecord::RecordInvalid" do assert_no_difference "Topic.count" do assert_raise ActiveRecord::RecordInvalid do Topic.import! invalid_topics end end end end context "with valid data" do it "should import data" do assert_difference "Topic.count", +2 do Topic.import! topics end end end end context "with array of columns and array of values" do let(:columns) { %w(title author_name) } let(:valid_values) { [["LDAP", "Jerry Carter"], ["Rails Recipes", "Chad Fowler"]] } let(:invalid_values) { [["Rails Recipes", "Chad Fowler"], ["The RSpec Book", ""], ["Agile+UX", ""]] } context "with invalid data" do it "should raise ActiveRecord::RecordInvalid" do assert_no_difference "Topic.count" do assert_raise ActiveRecord::RecordInvalid do Topic.import! 
columns, invalid_values end end end end context "with valid data" do it "should import data" do assert_difference "Topic.count", +2 do Topic.import! columns, valid_values end end end end context "with objects that respond to .to_sql as values" do let(:columns) { %w(title author_name) } let(:valid_values) { [["LDAP", Book.select("'Jerry Carter'").limit(1)], ["Rails Recipes", Book.select("'Chad Fowler'").limit(1)]] } it "should import data" do assert_difference "Topic.count", +2 do Topic.import! columns, valid_values topics = Topic.all assert_equal "Jerry Carter", topics.first.author_name assert_equal "Chad Fowler", topics.last.author_name end end end end end activerecord-import-0.28.2/test/postgis/0000755000004100000410000000000013424140671020266 5ustar www-datawww-dataactiverecord-import-0.28.2/test/postgis/import_test.rb0000644000004100000410000000051213424140671023162 0ustar www-datawww-datarequire File.expand_path(File.dirname(__FILE__) + '/../test_helper') require File.expand_path(File.dirname(__FILE__) + '/../support/postgresql/import_examples') should_support_postgresql_import_functionality if ActiveRecord::Base.connection.supports_on_duplicate_key_update? should_support_postgresql_upsert_functionality end activerecord-import-0.28.2/test/jdbcpostgresql/0000755000004100000410000000000013424140671021624 5ustar www-datawww-dataactiverecord-import-0.28.2/test/jdbcpostgresql/import_test.rb0000644000004100000410000000032113424140671024516 0ustar www-datawww-datarequire File.expand_path(File.dirname(__FILE__) + '/../test_helper') require File.expand_path(File.dirname(__FILE__) + '/../support/postgresql/import_examples') should_support_postgresql_import_functionality activerecord-import-0.28.2/test/jdbcmysql/0000755000004100000410000000000013424140671020566 5ustar www-datawww-dataactiverecord-import-0.28.2/test/jdbcmysql/import_test.rb0000644000004100000410000000042313424140671023463 0ustar www-datawww-datarequire File.expand_path(File.dirname(__FILE__) + '/../test_helper') require File.expand_path(File.dirname(__FILE__) + '/../support/assertions') require File.expand_path(File.dirname(__FILE__) + '/../support/mysql/import_examples') should_support_mysql_import_functionality activerecord-import-0.28.2/test/mysqlspatial2/0000755000004100000410000000000013424140671021403 5ustar www-datawww-dataactiverecord-import-0.28.2/test/mysqlspatial2/import_test.rb0000644000004100000410000000042413424140671024301 0ustar www-datawww-datarequire File.expand_path(File.dirname(__FILE__) + '/../test_helper') require File.expand_path(File.dirname(__FILE__) + '/../support/assertions') require File.expand_path(File.dirname(__FILE__) + '/../support/mysql/import_examples') should_support_mysql_import_functionality activerecord-import-0.28.2/test/travis/0000755000004100000410000000000013424140671020106 5ustar www-datawww-dataactiverecord-import-0.28.2/test/travis/database.yml0000644000004100000410000000172613424140671022403 0ustar www-datawww-datacommon: &common username: root password: encoding: utf8 host: localhost database: activerecord_import_test jdbcpostgresql: &postgresql <<: *common username: postgres adapter: jdbcpostgresql min_messages: warning jdbcmysql: &mysql2 <<: *common adapter: jdbcmysql jdbcsqlite3: &sqlite3 <<: *common adapter: jdbcsqlite3 mysql2: &mysql2 <<: *common adapter: mysql2 mysql2spatial: <<: *mysql2 mysql2_makara: <<: *mysql2 oracle: <<: *common adapter: oracle min_messages: debug postgresql: &postgresql <<: *common username: postgres adapter: postgresql min_messages: warning 
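# The makara and PostGIS entries below deliberately alias the plain
# postgresql settings; the files in test/adapters/ pick which entry a
# given rake task uses by setting ENV["ARE_DB"].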
postgresql_makara: <<: *postgresql postgis: <<: *postgresql seamless_database_pool: <<: *common adapter: seamless_database_pool pool_adapter: mysql2 prepared_statements: false master: host: localhost sqlite: adapter: sqlite dbfile: test.db sqlite3: &sqlite3 adapter: sqlite3 database: ":memory:" spatialite: <<: *sqlite3 activerecord-import-0.28.2/test/support/0000755000004100000410000000000013424140671020312 5ustar www-datawww-dataactiverecord-import-0.28.2/test/support/factories.rb0000644000004100000410000000274413424140671022625 0ustar www-datawww-dataFactoryBot.define do sequence(:book_title) { |n| "Book #{n}" } sequence(:chapter_title) { |n| "Chapter #{n}" } sequence(:end_note) { |n| "Endnote #{n}" } factory :group do sequence(:order) { |n| "Order #{n}" } end factory :invalid_topic, class: "Topic" do sequence(:title) { |n| "Title #{n}" } author_name { nil } end factory :topic do sequence(:title) { |n| "Title #{n}" } sequence(:author_name) { |n| "Author #{n}" } sequence(:content) { |n| "Content #{n}" } end factory :widget do sequence(:w_id) { |n| n } end factory :question do sequence(:body) { |n| "Text #{n}" } trait :with_rule do after(:build) do |question| question.build_rule(FactoryBot.attributes_for(:rule)) end end end factory :rule do sequence(:id) { |n| n } sequence(:condition_text) { |n| "q_#{n}_#{n}" } end factory :topic_with_book, parent: :topic do after(:build) do |topic| 2.times do book = topic.books.build(title: FactoryBot.generate(:book_title), author_name: 'Stephen King') 3.times do book.chapters.build(title: FactoryBot.generate(:chapter_title)) end 4.times do book.end_notes.build(note: FactoryBot.generate(:end_note)) end end end end factory :book do title { 'Tortilla Flat' } author_name { 'John Steinbeck' } end factory :car do sequence(:Name) { |n| n } sequence(:Features) { |n| "Feature #{n}" } end end activerecord-import-0.28.2/test/support/mysql/0000755000004100000410000000000013424140671021457 5ustar www-datawww-dataactiverecord-import-0.28.2/test/support/mysql/import_examples.rb0000644000004100000410000000766513424140671025226 0ustar www-datawww-data# encoding: UTF-8 def should_support_mysql_import_functionality # Forcefully enable strict mode for this session. 
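# (STRICT_ALL_TABLES makes MySQL raise errors instead of silently
# truncating or coercing out-of-range and invalid values, so type
# problems surface here the same way they do on the other adapters.)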
ActiveRecord::Base.connection.execute "set sql_mode='STRICT_ALL_TABLES'" should_support_basic_on_duplicate_key_update should_support_on_duplicate_key_ignore describe "#import" do context "with :on_duplicate_key_update and validation checks turned off" do extend ActiveSupport::TestCase::ImportAssertions asssertion_group(:should_support_on_duplicate_key_update) do should_not_update_fields_not_mentioned should_update_foreign_keys should_not_update_created_at_on_timestamp_columns should_update_updated_at_on_timestamp_columns end macro(:perform_import) { raise "supply your own #perform_import in a context below" } macro(:updated_topic) { Topic.find(@topic.id) } let(:columns) { %w( id title author_name author_email_address parent_id ) } let(:values) { [[99, "Book", "John Doe", "john@doe.com", 17]] } let(:updated_values) { [[99, "Book - 2nd Edition", "Author Should Not Change", "johndoe@example.com", 57]] } macro(:perform_import) do |*opts| Topic.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: update_columns, validate: false) end setup do Topic.import columns, values, validate: false @topic = Topic.find 99 end context "using string hash map" do let(:update_columns) { { "title" => "title", "author_email_address" => "author_email_address", "parent_id" => "parent_id" } } should_support_on_duplicate_key_update should_update_fields_mentioned end context "using string hash map, but specifying column mismatches" do let(:update_columns) { { "title" => "author_email_address", "author_email_address" => "title", "parent_id" => "parent_id" } } should_support_on_duplicate_key_update should_update_fields_mentioned_with_hash_mappings end context "using symbol hash map" do let(:update_columns) { { title: :title, author_email_address: :author_email_address, parent_id: :parent_id } } should_support_on_duplicate_key_update should_update_fields_mentioned end context "using symbol hash map, but specifying column mismatches" do let(:update_columns) { { title: :author_email_address, author_email_address: :title, parent_id: :parent_id } } should_support_on_duplicate_key_update should_update_fields_mentioned_with_hash_mappings end end context "with :synchronization option" do let(:topics) { [] } let(:values) { [[topics.first.id, "Jerry Carter", "title1"], [topics.last.id, "Chad Fowler", "title2"]] } let(:columns) { %w(id author_name title) } setup do topics << Topic.create!(title: "LDAP", author_name: "Big Bird", content: "Putting Directories to Work.") topics << Topic.create!(title: "Rails Recipes", author_name: "Elmo", content: "A trusted collection of solutions.") end it "synchronizes passed in ActiveRecord model instances with the data just imported" do columns2update = ['author_name'] expected_count = Topic.count Topic.import( columns, values, validate: false, on_duplicate_key_update: columns2update, synchronize: topics ) assert_equal expected_count, Topic.count, "no new records should have been created!" assert_equal "Jerry Carter", topics.first.author_name, "wrong author!" assert_equal "Chad Fowler", topics.last.author_name, "wrong author!" 
end end if ENV['AR_VERSION'].to_f >= 5.1 context "with virtual columns" do let(:books) { [Book.new(author_name: "foo", title: "bar")] } it "ignores virtual columns and creates record" do assert_difference "Book.count", +1 do Book.import books end end end end end end activerecord-import-0.28.2/test/support/sqlite3/0000755000004100000410000000000013424140671021676 5ustar www-datawww-dataactiverecord-import-0.28.2/test/support/sqlite3/import_examples.rb0000644000004100000410000002407013424140671025436 0ustar www-datawww-data# encoding: UTF-8 def should_support_sqlite3_import_functionality if ActiveRecord::Base.connection.supports_on_duplicate_key_update? should_support_sqlite_upsert_functionality end describe "#supports_imports?" do context "and SQLite is 3.7.11 or higher" do it "supports import" do version = ActiveRecord::ConnectionAdapters::SQLite3Adapter::Version.new("3.7.11") assert ActiveRecord::Base.supports_import?(version) version = ActiveRecord::ConnectionAdapters::SQLite3Adapter::Version.new("3.7.12") assert ActiveRecord::Base.supports_import?(version) end end context "and SQLite less than 3.7.11" do it "doesn't support import" do version = ActiveRecord::ConnectionAdapters::SQLite3Adapter::Version.new("3.7.10") assert !ActiveRecord::Base.supports_import?(version) end end end describe "#import" do it "imports with a single insert on SQLite 3.7.11 or higher" do assert_difference "Topic.count", +507 do result = Topic.import Build(7, :topics) assert_equal 1, result.num_inserts, "Failed to issue a single INSERT statement. Make sure you have a supported version of SQLite3 (3.7.11 or higher) installed" assert_equal 7, Topic.count, "Failed to insert all records. Make sure you have a supported version of SQLite3 (3.7.11 or higher) installed" result = Topic.import Build(500, :topics) assert_equal 1, result.num_inserts, "Failed to issue a single INSERT statement. Make sure you have a supported version of SQLite3 (3.7.11 or higher) installed" assert_equal 507, Topic.count, "Failed to insert all records. Make sure you have a supported version of SQLite3 (3.7.11 or higher) installed" end end it "imports with two inserts on SQLite 3.7.11 or higher" do assert_difference "Topic.count", +501 do result = Topic.import Build(501, :topics) assert_equal 2, result.num_inserts, "Failed to issue two INSERT statements. Make sure you have a supported version of SQLite3 (3.7.11 or higher) installed" assert_equal 501, Topic.count, "Failed to insert all records. Make sure you have a supported version of SQLite3 (3.7.11 or higher) installed" end end it "imports with five inserts on SQLite 3.7.11 or higher" do assert_difference "Topic.count", +2500 do result = Topic.import Build(2500, :topics) assert_equal 5, result.num_inserts, "Failed to issue five INSERT statements. Make sure you have a supported version of SQLite3 (3.7.11 or higher) installed" assert_equal 2500, Topic.count, "Failed to insert all records. 
Make sure you have a supported version of SQLite3 (3.7.11 or higher) installed" end end end end def should_support_sqlite_upsert_functionality should_support_basic_on_duplicate_key_update should_support_on_duplicate_key_ignore describe "#import" do extend ActiveSupport::TestCase::ImportAssertions macro(:perform_import) { raise "supply your own #perform_import in a context below" } macro(:updated_topic) { Topic.find(@topic.id) } context "with :on_duplicate_key_ignore and validation checks turned off" do let(:columns) { %w( id title author_name author_email_address parent_id ) } let(:values) { [[99, "Book", "John Doe", "john@doe.com", 17]] } let(:updated_values) { [[99, "Book - 2nd Edition", "Author Should Not Change", "johndoe@example.com", 57]] } setup do Topic.import columns, values, validate: false end it "should not update any records" do result = Topic.import columns, updated_values, on_duplicate_key_ignore: true, validate: false assert_equal [], result.ids end end context "with :on_duplicate_key_update and validation checks turned off" do asssertion_group(:should_support_on_duplicate_key_update) do should_not_update_fields_not_mentioned should_update_foreign_keys should_not_update_created_at_on_timestamp_columns should_update_updated_at_on_timestamp_columns end context "using a hash" do context "with :columns a hash" do let(:columns) { %w( id title author_name author_email_address parent_id ) } let(:values) { [[99, "Book", "John Doe", "john@doe.com", 17]] } let(:updated_values) { [[99, "Book - 2nd Edition", "Author Should Not Change", "johndoe@example.com", 57]] } macro(:perform_import) do |*opts| Topic.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: { conflict_target: :id, columns: update_columns }, validate: false) end setup do Topic.import columns, values, validate: false @topic = Topic.find 99 end it "should not modify the passed in :on_duplicate_key_update columns array" do assert_nothing_raised do columns = %w(title author_name).freeze Topic.import columns, [%w(foo, bar)], on_duplicate_key_update: { columns: columns } end end context "using string hash map" do let(:update_columns) { { "title" => "title", "author_email_address" => "author_email_address", "parent_id" => "parent_id" } } should_support_on_duplicate_key_update should_update_fields_mentioned end context "using string hash map, but specifying column mismatches" do let(:update_columns) { { "title" => "author_email_address", "author_email_address" => "title", "parent_id" => "parent_id" } } should_support_on_duplicate_key_update should_update_fields_mentioned_with_hash_mappings end context "using symbol hash map" do let(:update_columns) { { title: :title, author_email_address: :author_email_address, parent_id: :parent_id } } should_support_on_duplicate_key_update should_update_fields_mentioned end context "using symbol hash map, but specifying column mismatches" do let(:update_columns) { { title: :author_email_address, author_email_address: :title, parent_id: :parent_id } } should_support_on_duplicate_key_update should_update_fields_mentioned_with_hash_mappings end end context 'with :index_predicate' do let(:columns) { %w( id device_id alarm_type status metadata ) } let(:values) { [[99, 17, 1, 1, 'foo']] } let(:updated_values) { [[99, 17, 1, 2, 'bar']] } macro(:perform_import) do |*opts| Alarm.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: { conflict_target: [:device_id, :alarm_type], index_predicate: 'status <> 0', columns: [:status] }, validate: 
false) end macro(:updated_alarm) { Alarm.find(@alarm.id) } setup do Alarm.import columns, values, validate: false @alarm = Alarm.find 99 end context 'supports on duplicate key update for partial indexes' do it 'should not update created_at timestamp columns' do Timecop.freeze Chronic.parse("5 minutes from now") do perform_import assert_in_delta @alarm.created_at.to_i, updated_alarm.created_at.to_i, 1 end end it 'should update updated_at timestamp columns' do time = Chronic.parse("5 minutes from now") Timecop.freeze time do perform_import assert_in_delta time.to_i, updated_alarm.updated_at.to_i, 1 end end it 'should not update fields not mentioned' do perform_import assert_equal 'foo', updated_alarm.metadata end it 'should update fields mentioned with hash mappings' do perform_import assert_equal 2, updated_alarm.status end end end context 'with :condition' do let(:columns) { %w( id device_id alarm_type status metadata) } let(:values) { [[99, 17, 1, 1, 'foo']] } let(:updated_values) { [[99, 17, 1, 1, 'bar']] } macro(:perform_import) do |*opts| Alarm.import( columns, updated_values, opts.extract_options!.merge( on_duplicate_key_update: { conflict_target: [:id], condition: "alarms.metadata NOT LIKE '%foo%'", columns: [:metadata] }, validate: false ) ) end macro(:updated_alarm) { Alarm.find(@alarm.id) } setup do Alarm.import columns, values, validate: false @alarm = Alarm.find 99 end it 'should not update fields not matched' do perform_import assert_equal 'foo', updated_alarm.metadata end end context "with no :conflict_target" do context "with no primary key" do it "raises ArgumentError" do error = assert_raises ArgumentError do Rule.import Build(3, :rules), on_duplicate_key_update: [:condition_text], validate: false end assert_match(/Expected :conflict_target to be specified/, error.message) end end end context "with no :columns" do let(:columns) { %w( id title author_name author_email_address ) } let(:values) { [[100, "Book", "John Doe", "john@doe.com"]] } let(:updated_values) { [[100, "Title Should Not Change", "Author Should Not Change", "john@nogo.com"]] } macro(:perform_import) do |*opts| Topic.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: { conflict_target: :id }, validate: false) end setup do Topic.import columns, values, validate: false @topic = Topic.find 100 end should_update_updated_at_on_timestamp_columns end end end end end activerecord-import-0.28.2/test/support/active_support/0000755000004100000410000000000013424140671023361 5ustar www-datawww-dataactiverecord-import-0.28.2/test/support/active_support/test_case_extensions.rb0000644000004100000410000000352113424140671030140 0ustar www-datawww-dataclass ActiveSupport::TestCase include ActiveRecord::TestFixtures if ENV['AR_VERSION'].to_f >= 5.0 self.use_transactional_tests = true else self.use_transactional_fixtures = true end class << self def requires_active_record_version(version_string, &blk) return unless Gem::Dependency.new('', version_string).match?('', ActiveRecord::VERSION::STRING) instance_eval(&blk) end def assertion(name, &block) mc = class << self; self; end mc.class_eval do define_method(name) do it(name, &block) end end end def asssertion_group(name, &block) mc = class << self; self; end mc.class_eval do define_method(name, &block) end end def macro(name, &block) class_eval do define_method(name, &block) end end def describe(description, toplevel = nil, &blk) text = toplevel ? 
description : "#{name} #{description}" klass = Class.new(self) klass.class_eval <<-RUBY_EVAL def self.name "#{text}" end RUBY_EVAL # do not inherit test methods from the superclass klass.class_eval do instance_methods.grep(/^test.+/) do |method| undef_method method end end klass.instance_eval(&blk) end alias context describe def let(name, &blk) define_method(name) do instance_variable_name = "@__let_#{name}" return instance_variable_get(instance_variable_name) if instance_variable_defined?(instance_variable_name) instance_variable_set(instance_variable_name, instance_eval(&blk)) end end def it(description, &blk) define_method("test_#{name}_#{description}", &blk) end end end def describe(description, &blk) ActiveSupport::TestCase.describe(description, true, &blk) end activerecord-import-0.28.2/test/support/shared_examples/0000755000004100000410000000000013424140671023456 5ustar www-datawww-dataactiverecord-import-0.28.2/test/support/shared_examples/recursive_import.rb0000644000004100000410000001401013424140671027400 0ustar www-datawww-datadef should_support_recursive_import describe "importing objects with associations" do let(:new_topics) { Build(num_topics, :topic_with_book) } let(:new_topics_with_invalid_chapter) do chapter = new_topics.first.books.first.chapters.first chapter.title = nil new_topics end let(:num_topics) { 3 } let(:num_books) { 6 } let(:num_chapters) { 18 } let(:num_endnotes) { 24 } let(:new_question_with_rule) { FactoryBot.build :question, :with_rule } it 'imports top level' do assert_difference "Topic.count", +num_topics do Topic.import new_topics, recursive: true new_topics.each do |topic| assert_not_nil topic.id end end end it 'imports first level associations' do assert_difference "Book.count", +num_books do Topic.import new_topics, recursive: true new_topics.each do |topic| topic.books.each do |book| assert_equal topic.id, book.topic_id end end end end it 'imports polymorphic associations' do discounts = Array.new(1) { |i| Discount.new(amount: i) } books = Array.new(1) { |i| Book.new(author_name: "Author ##{i}", title: "Book ##{i}") } books.each do |book| book.discounts << discounts end Book.import books, recursive: true books.each do |book| book.discounts.each do |discount| assert_not_nil discount.discountable_id assert_equal 'Book', discount.discountable_type end end end it 'imports polymorphic associations from subclass' do discounts = Array.new(1) { |i| Discount.new(amount: i) } dictionaries = Array.new(1) { |i| Dictionary.new(author_name: "Author ##{i}", title: "Book ##{i}") } dictionaries.each do |dictionary| dictionary.discounts << discounts end Dictionary.import dictionaries, recursive: true assert_equal 1, Dictionary.last.discounts.count dictionaries.each do |dictionary| dictionary.discounts.each do |discount| assert_not_nil discount.discountable_id assert_equal 'Book', discount.discountable_type end end end [{ recursive: false }, {}].each do |import_options| it "skips recursion for #{import_options}" do assert_difference "Book.count", 0 do Topic.import new_topics, import_options end end end it 'imports deeper nested associations' do assert_difference "Chapter.count", +num_chapters do assert_difference "EndNote.count", +num_endnotes do Topic.import new_topics, recursive: true new_topics.each do |topic| topic.books.each do |book| book.chapters.each do |chapter| assert_equal book.id, chapter.book_id end book.end_notes.each do |endnote| assert_equal book.id, endnote.book_id end end end end end end # Models are only valid if all associations are valid it "only 
imports models with valid associations" do assert_difference "Topic.count", 2 do assert_difference "Book.count", 4 do assert_difference "Chapter.count", 12 do assert_difference "EndNote.count", 16 do Topic.import new_topics_with_invalid_chapter, recursive: true end end end end end it "skips validation of the associations if requested" do assert_difference "Chapter.count", +num_chapters do Topic.import new_topics_with_invalid_chapter, validate: false, recursive: true end end it 'imports has_one associations' do assert_difference 'Rule.count' do Question.import [new_question_with_rule], recursive: true end end it "imports an imported belongs_to association id" do first_new_topic = new_topics[0] second_new_topic = new_topics[1] books = first_new_topic.books.to_a Topic.import new_topics, validate: false assert_difference "Book.count", books.size do Book.import books, validate: false end books.each do |book| assert_equal book.topic_id, first_new_topic.id end books.each { |book| book.topic_id = second_new_topic.id } assert_no_difference "Book.count", books.size do Book.import books, validate: false, on_duplicate_key_update: [:topic_id] end books.each do |book| assert_equal book.topic_id, second_new_topic.id end end unless ENV["SKIP_COMPOSITE_PK"] describe "with composite primary keys" do it "should import models and set id" do tags = [] tags << Tag.new(tag_id: 1, publisher_id: 1, tag: 'Mystery') tags << Tag.new(tag_id: 2, publisher_id: 1, tag: 'Science') assert_difference "Tag.count", +2 do Tag.import tags end assert_equal 1, tags[0].tag_id assert_equal 2, tags[1].tag_id end end end describe "all_or_none" do [Book, Chapter, Topic, EndNote].each do |type| it "creates #{type}" do assert_difference "#{type}.count", 0 do Topic.import new_topics_with_invalid_chapter, all_or_none: true, recursive: true end end end end # If adapter supports on_duplicate_key_update, it is only applied to top level models so that SQL with invalid # columns, keys, etc isn't generated for child associations when doing recursive import if ActiveRecord::Base.connection.supports_on_duplicate_key_update? 
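# A minimal sketch of the call under test (Topic and Book come from this
# suite's factories); the upsert clause is applied to the top-level topics
# INSERT only, while the recursively imported books are inserted normally:
#
#   Topic.import topics, recursive: true,
#                on_duplicate_key_update: [:updated_at], validate: false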
describe "on_duplicate_key_update" do let(:new_topics) { Build(1, :topic_with_book) } it "imports objects with associations" do assert_difference "Topic.count", +1 do Topic.import new_topics, recursive: true, on_duplicate_key_update: [:updated_at], validate: false new_topics.each do |topic| assert_not_nil topic.id end end end end end end end activerecord-import-0.28.2/test/support/shared_examples/on_duplicate_key_ignore.rb0000644000004100000410000000304613424140671030667 0ustar www-datawww-datadef should_support_on_duplicate_key_ignore describe "#import" do extend ActiveSupport::TestCase::ImportAssertions let(:topic) { Topic.create!(title: "Book", author_name: "John Doe") } let(:topics) { [topic] } context "with :on_duplicate_key_ignore" do it "should skip duplicates and continue import" do topics << Topic.new(title: "Book 2", author_name: "Jane Doe") assert_difference "Topic.count", +1 do result = Topic.import topics, on_duplicate_key_ignore: true, validate: false assert_not_equal topics.first.id, result.ids.first assert_nil topics.last.id end end unless ENV["SKIP_COMPOSITE_PK"] context "with composite primary keys" do it "should import array of values successfully" do columns = [:tag_id, :publisher_id, :tag] values = [[1, 1, 'Mystery'], [1, 1, 'Science']] assert_difference "Tag.count", +1 do Tag.import columns, values, on_duplicate_key_ignore: true, validate: false end assert_equal 'Mystery', Tag.first.tag end end end end context "with :ignore" do it "should skip duplicates and continue import" do topics << Topic.new(title: "Book 2", author_name: "Jane Doe") assert_difference "Topic.count", +1 do result = Topic.import topics, ignore: true, validate: false assert_not_equal topics.first.id, result.ids.first assert_nil topics.last.id end end end end end activerecord-import-0.28.2/test/support/shared_examples/on_duplicate_key_update.rb0000644000004100000410000003325113424140671030667 0ustar www-datawww-datadef should_support_basic_on_duplicate_key_update describe "#import" do extend ActiveSupport::TestCase::ImportAssertions macro(:perform_import) { raise "supply your own #perform_import in a context below" } macro(:updated_topic) { Topic.find(@topic.id) } context "with lock_version upsert" do describe 'optimistic lock' do it 'lock_version upsert after on_duplcate_key_update by model' do users = [ User.new(name: 'Salomon'), User.new(name: 'Nathan') ] User.import(users) assert User.count == users.length User.all.each do |user| assert_equal 0, user.lock_version end updated_users = User.all.map do |user| user.name += ' Rothschild' user end User.import(updated_users, on_duplicate_key_update: [:name]) assert User.count == updated_users.length User.all.each_with_index do |user, i| assert_equal user.name, users[i].name + ' Rothschild' assert_equal 1, user.lock_version end end it 'lock_version upsert after on_duplcate_key_update by array' do users = [ User.new(name: 'Salomon'), User.new(name: 'Nathan') ] User.import(users) assert User.count == users.length User.all.each do |user| assert_equal 0, user.lock_version end columns = [:id, :name] updated_values = User.all.map do |user| user.name += ' Rothschild' [user.id, user.name] end User.import(columns, updated_values, on_duplicate_key_update: [:name]) assert User.count == updated_values.length User.all.each_with_index do |user, i| assert_equal user.name, users[i].name + ' Rothschild' assert_equal 1, user.lock_version end end it 'lock_version upsert after on_duplcate_key_update by hash' do users = [ User.new(name: 'Salomon'), User.new(name: 'Nathan') ] 
User.import(users) assert User.count == users.length User.all.each do |user| assert_equal 0, user.lock_version end updated_values = User.all.map do |user| user.name += ' Rothschild' { id: user.id, name: user.name } end User.import(updated_values, on_duplicate_key_update: [:name]) assert User.count == updated_values.length User.all.each_with_index do |user, i| assert_equal user.name, users[i].name + ' Rothschild' assert_equal 1, user.lock_version end end it 'upsert optimistic lock columns other than lock_version by model' do accounts = [ Account.new(name: 'Salomon'), Account.new(name: 'Nathan') ] Account.import(accounts) assert Account.count == accounts.length Account.all.each do |user| assert_equal 0, user.lock end updated_accounts = Account.all.map do |user| user.name += ' Rothschild' user end Account.import(updated_accounts, on_duplicate_key_update: [:id, :name]) assert Account.count == updated_accounts.length Account.all.each_with_index do |user, i| assert_equal user.name, accounts[i].name + ' Rothschild' assert_equal 1, user.lock end end it 'upsert optimistic lock columns other than lock_version by array' do accounts = [ Account.new(name: 'Salomon'), Account.new(name: 'Nathan') ] Account.import(accounts) assert Account.count == accounts.length Account.all.each do |user| assert_equal 0, user.lock end columns = [:id, :name] updated_values = Account.all.map do |user| user.name += ' Rothschild' [user.id, user.name] end Account.import(columns, updated_values, on_duplicate_key_update: [:name]) assert Account.count == updated_values.length Account.all.each_with_index do |user, i| assert_equal user.name, accounts[i].name + ' Rothschild' assert_equal 1, user.lock end end it 'upsert optimistic lock columns other than lock_version by hash' do accounts = [ Account.new(name: 'Salomon'), Account.new(name: 'Nathan') ] Account.import(accounts) assert Account.count == accounts.length Account.all.each do |user| assert_equal 0, user.lock end updated_values = Account.all.map do |user| user.name += ' Rothschild' { id: user.id, name: user.name } end Account.import(updated_values, on_duplicate_key_update: [:name]) assert Account.count == updated_values.length Account.all.each_with_index do |user, i| assert_equal user.name, accounts[i].name + ' Rothschild' assert_equal 1, user.lock end end it 'update the lock_version of models separated by namespaces by model' do makers = [ Bike::Maker.new(name: 'Yamaha'), Bike::Maker.new(name: 'Honda') ] Bike::Maker.import(makers) assert Bike::Maker.count == makers.length Bike::Maker.all.each do |maker| assert_equal 0, maker.lock_version end updated_makers = Bike::Maker.all.map do |maker| maker.name += ' bikes' maker end Bike::Maker.import(updated_makers, on_duplicate_key_update: [:name]) assert Bike::Maker.count == updated_makers.length Bike::Maker.all.each_with_index do |maker, i| assert_equal maker.name, makers[i].name + ' bikes' assert_equal 1, maker.lock_version end end it 'update the lock_version of models separated by namespaces by array' do makers = [ Bike::Maker.new(name: 'Yamaha'), Bike::Maker.new(name: 'Honda') ] Bike::Maker.import(makers) assert Bike::Maker.count == makers.length Bike::Maker.all.each do |maker| assert_equal 0, maker.lock_version end columns = [:id, :name] updated_values = Bike::Maker.all.map do |maker| maker.name += ' bikes' [maker.id, maker.name] end Bike::Maker.import(columns, updated_values, on_duplicate_key_update: [:name]) assert Bike::Maker.count == updated_values.length Bike::Maker.all.each_with_index do |maker, i| assert_equal maker.name, 
makers[i].name + ' bikes' assert_equal 1, maker.lock_version end end it 'update the lock_version of models separated by namespaces by hash' do makers = [ Bike::Maker.new(name: 'Yamaha'), Bike::Maker.new(name: 'Honda') ] Bike::Maker.import(makers) assert Bike::Maker.count == makers.length Bike::Maker.all.each do |maker| assert_equal 0, maker.lock_version end updated_values = Bike::Maker.all.map do |maker| maker.name += ' bikes' { id: maker.id, name: maker.name } end Bike::Maker.import(updated_values, on_duplicate_key_update: [:name]) assert Bike::Maker.count == updated_values.length Bike::Maker.all.each_with_index do |maker, i| assert_equal maker.name, makers[i].name + ' bikes' assert_equal 1, maker.lock_version end end end end context "with :on_duplicate_key_update" do describe 'using :all' do let(:columns) { %w( id title author_name author_email_address parent_id ) } let(:updated_values) { [[99, "Book - 2nd Edition", "Jane Doe", "janedoe@example.com", 57]] } macro(:perform_import) do |*opts| Topic.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: :all, validate: false) end setup do values = [[99, "Book", "John Doe", "john@doe.com", 17, 3]] Topic.import columns + ['replies_count'], values, validate: false end it 'updates all specified columns' do perform_import updated_topic = Topic.find(99) assert_equal 'Book - 2nd Edition', updated_topic.title assert_equal 'Jane Doe', updated_topic.author_name assert_equal 'janedoe@example.com', updated_topic.author_email_address assert_equal 57, updated_topic.parent_id assert_equal 3, updated_topic.replies_count end end describe "argument safety" do it "should not modify the passed in :on_duplicate_key_update array" do assert_nothing_raised do columns = %w(title author_name).freeze Topic.import columns, [%w(foo, bar)], on_duplicate_key_update: columns end end end context "with timestamps enabled" do let(:time) { Chronic.parse("5 minutes from now") } it 'should not overwrite changed updated_at with current timestamp' do topic = Topic.create(author_name: "Jane Doe", title: "Book") timestamp = Time.now.utc topic.updated_at = timestamp Topic.import [topic], on_duplicate_key_update: :all, validate: false assert_equal timestamp.to_s, Topic.last.updated_at.to_s end it 'should update updated_at with current timestamp' do topic = Topic.create(author_name: "Jane Doe", title: "Book") Timecop.freeze(time) do Topic.import [topic], on_duplicate_key_update: [:updated_at], validate: false assert_in_delta time.to_i, topic.reload.updated_at.to_i, 1.second end end end context "with validation checks turned off" do asssertion_group(:should_support_on_duplicate_key_update) do should_not_update_fields_not_mentioned should_update_foreign_keys should_not_update_created_at_on_timestamp_columns should_update_updated_at_on_timestamp_columns end let(:columns) { %w( id title author_name author_email_address parent_id ) } let(:values) { [[99, "Book", "John Doe", "john@doe.com", 17]] } let(:updated_values) { [[99, "Book - 2nd Edition", "Author Should Not Change", "johndoe@example.com", 57]] } macro(:perform_import) do |*opts| Topic.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: update_columns, validate: false) end setup do Topic.import columns, values, validate: false @topic = Topic.find 99 end context "using an empty array" do let(:update_columns) { [] } should_not_update_fields_not_mentioned should_update_updated_at_on_timestamp_columns end context "using string column names" do let(:update_columns) { %w(title 
author_email_address parent_id) } should_support_on_duplicate_key_update should_update_fields_mentioned end context "using symbol column names" do let(:update_columns) { [:title, :author_email_address, :parent_id] } should_support_on_duplicate_key_update should_update_fields_mentioned end end context "with a table that has a non-standard primary key" do let(:columns) { [:promotion_id, :code] } let(:values) { [[1, 'DISCOUNT1']] } let(:updated_values) { [[1, 'DISCOUNT2']] } let(:update_columns) { [:code] } macro(:perform_import) do |*opts| Promotion.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: update_columns, validate: false) end macro(:updated_promotion) { Promotion.find(@promotion.promotion_id) } setup do Promotion.import columns, values, validate: false @promotion = Promotion.find 1 end it "should update specified columns" do perform_import assert_equal 'DISCOUNT2', updated_promotion.code end end unless ENV["SKIP_COMPOSITE_PK"] context "with composite primary keys" do it "should import array of values successfully" do columns = [:tag_id, :publisher_id, :tag] Tag.import columns, [[1, 1, 'Mystery']], validate: false assert_difference "Tag.count", +0 do Tag.import columns, [[1, 1, 'Science']], on_duplicate_key_update: [:tag], validate: false end assert_equal 'Science', Tag.first.tag end end end end context "with :on_duplicate_key_update turned off" do let(:columns) { %w( id title author_name author_email_address parent_id ) } let(:values) { [[100, "Book", "John Doe", "john@doe.com", 17]] } let(:updated_values) { [[100, "Book - 2nd Edition", "This should raise an exception", "john@nogo.com", 57]] } macro(:perform_import) do |*opts| # `on_duplicate_key_update: false` is the tested feature Topic.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: false, validate: false) end setup do Topic.import columns, values, validate: false @topic = Topic.find 100 end it "should raise ActiveRecord::RecordNotUnique" do assert_raise ActiveRecord::RecordNotUnique do perform_import end end end end end activerecord-import-0.28.2/test/support/assertions.rb0000644000004100000410000000542013424140671023032 0ustar www-datawww-dataclass ActiveSupport::TestCase module ImportAssertions def self.extended(klass) klass.instance_eval do assertion(:should_not_update_created_at_on_timestamp_columns) do Timecop.freeze Chronic.parse("5 minutes from now") do perform_import assert_in_delta @topic.created_at.to_i, updated_topic.created_at.to_i, 1 assert_in_delta @topic.created_on.to_i, updated_topic.created_on.to_i, 1 end end assertion(:should_update_updated_at_on_timestamp_columns) do time = Chronic.parse("5 minutes from now") Timecop.freeze time do perform_import assert_in_delta time.to_i, updated_topic.updated_at.to_i, 1 assert_in_delta time.to_i, updated_topic.updated_on.to_i, 1 end end assertion(:should_not_update_updated_at_on_timestamp_columns) do time = Chronic.parse("5 minutes from now") Timecop.freeze time do perform_import assert_in_delta @topic.updated_at.to_i, updated_topic.updated_at.to_i, 1 assert_in_delta @topic.updated_on.to_i, updated_topic.updated_on.to_i, 1 end end assertion(:should_not_update_timestamps) do Timecop.freeze Chronic.parse("5 minutes from now") do perform_import timestamps: false assert_in_delta @topic.created_at.to_i, updated_topic.created_at.to_i, 1 assert_in_delta @topic.created_on.to_i, updated_topic.created_on.to_i, 1 assert_in_delta @topic.updated_at.to_i, updated_topic.updated_at.to_i, 1 assert_in_delta 
@topic.updated_on.to_i, updated_topic.updated_on.to_i, 1 end end assertion(:should_not_update_fields_not_mentioned) do assert_equal "John Doe", updated_topic.author_name end assertion(:should_update_fields_mentioned) do perform_import assert_equal "Book - 2nd Edition", updated_topic.title assert_equal "johndoe@example.com", updated_topic.author_email_address end assertion(:should_raise_update_fields_mentioned) do assert_raise ActiveRecord::RecordNotUnique do perform_import end assert_equal "Book", updated_topic.title assert_equal "john@doe.com", updated_topic.author_email_address end assertion(:should_update_fields_mentioned_with_hash_mappings) do perform_import assert_equal "johndoe@example.com", updated_topic.title assert_equal "Book - 2nd Edition", updated_topic.author_email_address end assertion(:should_update_foreign_keys) do perform_import assert_equal 57, updated_topic.parent_id end end end end end activerecord-import-0.28.2/test/support/generate.rb0000644000004100000410000000156013424140671022433 0ustar www-datawww-dataclass ActiveSupport::TestCase def Build(*args) # rubocop:disable Style/MethodName n = args.shift if args.first.is_a?(Numeric) factory = args.shift factory_bot_args = args.shift || {} if n [].tap do |collection| n.times.each { collection << FactoryBot.build(factory.to_s.singularize.to_sym, factory_bot_args) } end else FactoryBot.build(factory.to_s.singularize.to_sym, factory_bot_args) end end def Generate(*args) # rubocop:disable Style/MethodName n = args.shift if args.first.is_a?(Numeric) factory = args.shift factory_bot_args = args.shift || {} if n [].tap do |collection| n.times.each { collection << FactoryBot.create(factory.to_s.singularize.to_sym, factory_bot_args) } end else FactoryBot.create(factory.to_s.singularize.to_sym, factory_bot_args) end end end activerecord-import-0.28.2/test/support/postgresql/0000755000004100000410000000000013424140671022515 5ustar www-datawww-dataactiverecord-import-0.28.2/test/support/postgresql/import_examples.rb0000644000004100000410000004511113424140671026254 0ustar www-datawww-data# encoding: UTF-8 def should_support_postgresql_import_functionality should_support_recursive_import if ActiveRecord::Base.connection.supports_on_duplicate_key_update? should_support_postgresql_upsert_functionality end describe "#supports_imports?" do it "should support import" do assert ActiveRecord::Base.supports_import? end end describe "#import" do it "should import with a single insert" do # see ActiveRecord::ConnectionAdapters::AbstractAdapter test for more specifics assert_difference "Topic.count", +10 do result = Topic.import Build(3, :topics) assert_equal 1, result.num_inserts result = Topic.import Build(7, :topics) assert_equal 1, result.num_inserts end end context "setting attributes and marking clean" do let(:topic) { Build(:topics) } setup { Topic.import([topic]) } it "assigns ids" do assert topic.id.present? end it "marks models as clean" do assert !topic.changed? end it "marks models as persisted" do assert !topic.new_record? assert topic.persisted? end it "assigns timestamps" do assert topic.created_at.present? assert topic.updated_at.present? end end describe "with query cache enabled" do setup do unless ActiveRecord::Base.connection.query_cache_enabled ActiveRecord::Base.connection.enable_query_cache! 
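# With the query cache on, a repeated Topic.all would normally be served
# from cache; the example below relies on #import clearing that cache so
# the freshly inserted rows become visible.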
@disable_cache_on_teardown = true end end it "clears cache on insert" do before_import = Topic.all.to_a Topic.import(Build(2, :topics), validate: false) after_import = Topic.all.to_a assert_equal 2, after_import.size - before_import.size end teardown do if @disable_cache_on_teardown ActiveRecord::Base.connection.disable_query_cache! end end end describe "no_returning" do let(:books) { [Book.new(author_name: "foo", title: "bar")] } it "creates records" do assert_difference "Book.count", +1 do Book.import books, no_returning: true end end it "returns no ids" do assert_equal [], Book.import(books, no_returning: true).ids end end describe "returning" do let(:books) { [Book.new(author_name: "King", title: "It")] } let(:result) { Book.import(books, returning: %w(author_name title)) } let(:book_id) do if RUBY_PLATFORM == 'java' || ENV['AR_VERSION'].to_i >= 5.0 books.first.id else books.first.id.to_s end end it "creates records" do assert_difference("Book.count", +1) { result } end it "returns ids" do result assert_equal [book_id], result.ids end it "returns specified columns" do assert_equal [%w(King It)], result.results end context "when primary key and returning overlap" do let(:result) { Book.import(books, returning: %w(id title)) } setup { result } it "returns ids" do assert_equal [book_id], result.ids end it "returns specified columns" do assert_equal [[book_id, 'It']], result.results end end context "setting model attributes" do let(:code) { 'abc' } let(:discount) { 0.10 } let(:original_promotion) do Promotion.new(code: code, discount: discount) end let(:updated_promotion) do Promotion.new(code: code, description: 'ABC discount') end let(:returning_columns) { %w(discount) } setup do Promotion.import([original_promotion]) Promotion.import([updated_promotion], on_duplicate_key_update: { conflict_target: %i(code), columns: %i(description) }, returning: returning_columns) end it "sets model attributes" do assert_equal updated_promotion.discount, discount end context "returning multiple columns" do let(:returning_columns) { %w(discount description) } it "sets model attributes" do assert_equal updated_promotion.discount, discount end end end end end if ENV['AR_VERSION'].to_f >= 4.0 describe "with a uuid primary key" do let(:vendor) { Vendor.new(name: "foo") } let(:vendors) { [vendor] } it "creates records" do assert_difference "Vendor.count", +1 do Vendor.import vendors end end it "assigns an id to the model objects" do Vendor.import vendors assert_not_nil vendor.id end end describe "with an assigned uuid primary key" do let(:id) { SecureRandom.uuid } let(:vendor) { Vendor.new(id: id, name: "foo") } let(:vendors) { [vendor] } it "creates records with correct id" do assert_difference "Vendor.count", +1 do Vendor.import vendors end assert_equal id, vendor.id end end end describe "with store accessor fields" do if ENV['AR_VERSION'].to_f >= 4.0 it "imports values for json fields" do vendors = [Vendor.new(name: 'Vendor 1', size: 100)] assert_difference "Vendor.count", +1 do Vendor.import vendors end assert_equal(100, Vendor.first.size) end it "imports values for hstore fields" do vendors = [Vendor.new(name: 'Vendor 1', contact: 'John Smith')] assert_difference "Vendor.count", +1 do Vendor.import vendors end assert_equal('John Smith', Vendor.first.contact) end end if ENV['AR_VERSION'].to_f >= 4.2 it "imports values for jsonb fields" do vendors = [Vendor.new(name: 'Vendor 1', charge_code: '12345')] assert_difference "Vendor.count", +1 do Vendor.import vendors end assert_equal('12345', 
Vendor.first.charge_code) end end end if ENV['AR_VERSION'].to_f >= 4.2 describe "with serializable fields" do it "imports default values as correct data type" do vendors = [Vendor.new(name: 'Vendor 1')] assert_difference "Vendor.count", +1 do Vendor.import vendors end assert_equal({}, Vendor.first.json_data) end end end describe "with binary field" do let(:binary_value) { "\xE0'c\xB2\xB0\xB3Bh\\\xC2M\xB1m\\I\xC4r".force_encoding('ASCII-8BIT') } it "imports the correct values for binary fields" do alarms = [Alarm.new(device_id: 1, alarm_type: 1, status: 1, secret_key: binary_value)] assert_difference "Alarm.count", +1 do Alarm.import alarms end assert_equal(binary_value, Alarm.first.secret_key) end end end def should_support_postgresql_upsert_functionality should_support_basic_on_duplicate_key_update should_support_on_duplicate_key_ignore describe "#import" do extend ActiveSupport::TestCase::ImportAssertions macro(:perform_import) { raise "supply your own #perform_import in a context below" } macro(:updated_topic) { Topic.find(@topic.id) } context "with :on_duplicate_key_ignore and validation checks turned off" do let(:columns) { %w( id title author_name author_email_address parent_id ) } let(:values) { [[99, "Book", "John Doe", "john@doe.com", 17]] } let(:updated_values) { [[99, "Book - 2nd Edition", "Author Should Not Change", "johndoe@example.com", 57]] } setup do Topic.import columns, values, validate: false end it "should not update any records" do result = Topic.import columns, updated_values, on_duplicate_key_ignore: true, validate: false assert_equal [], result.ids end end context "with :on_duplicate_key_ignore and :recursive enabled" do let(:new_topic) { Build(1, :topic_with_book) } let(:mixed_topics) { Build(1, :topic_with_book) + new_topic + Build(1, :topic_with_book) } setup do Topic.import new_topic, recursive: true end # Recursive import depends on the primary keys of the parent model being returned # on insert. With on_duplicate_key_ignore enabled, not all ids will be returned # and it is possible that a model will be assigned the wrong id and then its children # would be associated with the wrong parent. 
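# Sketch of the failure mode this guards against (mixed_topics includes a
# topic that was already imported):
#
#   Topic.import mixed_topics, recursive: true,
#                on_duplicate_key_ignore: true, validate: false
#   # => raises ActiveRecord::RecordNotUnique; the ignore option is discarded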
it ":on_duplicate_key_ignore is ignored" do assert_raise ActiveRecord::RecordNotUnique do Topic.import mixed_topics, recursive: true, on_duplicate_key_ignore: true, validate: false end end end context "with :on_duplicate_key_update and validation checks turned off" do asssertion_group(:should_support_on_duplicate_key_update) do should_not_update_fields_not_mentioned should_update_foreign_keys should_not_update_created_at_on_timestamp_columns should_update_updated_at_on_timestamp_columns end context "using a hash" do context "with :columns :all" do let(:columns) { %w( id title author_name author_email_address parent_id ) } let(:updated_values) { [[99, "Book - 2nd Edition", "Jane Doe", "janedoe@example.com", 57]] } macro(:perform_import) do |*opts| Topic.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: { conflict_target: :id, columns: :all }, validate: false) end setup do values = [[99, "Book", "John Doe", "john@doe.com", 17, 3]] Topic.import columns + ['replies_count'], values, validate: false end it "should update all specified columns" do perform_import updated_topic = Topic.find(99) assert_equal 'Book - 2nd Edition', updated_topic.title assert_equal 'Jane Doe', updated_topic.author_name assert_equal 'janedoe@example.com', updated_topic.author_email_address assert_equal 57, updated_topic.parent_id assert_equal 3, updated_topic.replies_count end end context "with :columns a hash" do let(:columns) { %w( id title author_name author_email_address parent_id ) } let(:values) { [[99, "Book", "John Doe", "john@doe.com", 17]] } let(:updated_values) { [[99, "Book - 2nd Edition", "Author Should Not Change", "johndoe@example.com", 57]] } macro(:perform_import) do |*opts| Topic.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: { conflict_target: :id, columns: update_columns }, validate: false) end setup do Topic.import columns, values, validate: false @topic = Topic.find 99 end it "should not modify the passed in :on_duplicate_key_update columns array" do assert_nothing_raised do columns = %w(title author_name).freeze Topic.import columns, [%w(foo, bar)], { on_duplicate_key_update: { columns: columns }.freeze }.freeze end end context "using string hash map" do let(:update_columns) { { "title" => "title", "author_email_address" => "author_email_address", "parent_id" => "parent_id" } } should_support_on_duplicate_key_update should_update_fields_mentioned end context "using string hash map, but specifying column mismatches" do let(:update_columns) { { "title" => "author_email_address", "author_email_address" => "title", "parent_id" => "parent_id" } } should_support_on_duplicate_key_update should_update_fields_mentioned_with_hash_mappings end context "using symbol hash map" do let(:update_columns) { { title: :title, author_email_address: :author_email_address, parent_id: :parent_id } } should_support_on_duplicate_key_update should_update_fields_mentioned end context "using symbol hash map, but specifying column mismatches" do let(:update_columns) { { title: :author_email_address, author_email_address: :title, parent_id: :parent_id } } should_support_on_duplicate_key_update should_update_fields_mentioned_with_hash_mappings end end context 'with :index_predicate' do let(:columns) { %w( id device_id alarm_type status metadata ) } let(:values) { [[99, 17, 1, 1, 'foo']] } let(:updated_values) { [[99, 17, 1, 2, 'bar']] } macro(:perform_import) do |*opts| Alarm.import columns, updated_values, 
opts.extract_options!.merge(on_duplicate_key_update: { conflict_target: [:device_id, :alarm_type], index_predicate: 'status <> 0', columns: [:status] }, validate: false) end macro(:updated_alarm) { Alarm.find(@alarm.id) } setup do Alarm.import columns, values, validate: false @alarm = Alarm.find 99 end context 'supports on duplicate key update for partial indexes' do it 'should not update created_at timestamp columns' do Timecop.freeze Chronic.parse("5 minutes from now") do perform_import assert_in_delta @alarm.created_at.to_i, updated_alarm.created_at.to_i, 1 end end it 'should update updated_at timestamp columns' do time = Chronic.parse("5 minutes from now") Timecop.freeze time do perform_import assert_in_delta time.to_i, updated_alarm.updated_at.to_i, 1 end end it 'should not update fields not mentioned' do perform_import assert_equal 'foo', updated_alarm.metadata end it 'should update fields mentioned with hash mappings' do perform_import assert_equal 2, updated_alarm.status end end end context 'with :condition' do let(:columns) { %w( id device_id alarm_type status metadata) } let(:values) { [[99, 17, 1, 1, 'foo']] } let(:updated_values) { [[99, 17, 1, 1, 'bar']] } macro(:perform_import) do |*opts| Alarm.import( columns, updated_values, opts.extract_options!.merge( on_duplicate_key_update: { conflict_target: [:id], condition: "alarms.metadata NOT LIKE '%foo%'", columns: [:metadata] }, validate: false ) ) end macro(:updated_alarm) { Alarm.find(@alarm.id) } setup do Alarm.import columns, values, validate: false @alarm = Alarm.find 99 end it 'should not update fields not matched' do perform_import assert_equal 'foo', updated_alarm.metadata end end context "with :constraint_name" do let(:columns) { %w( id title author_name author_email_address parent_id ) } let(:values) { [[100, "Book", "John Doe", "john@doe.com", 17]] } let(:updated_values) { [[100, "Book - 2nd Edition", "Author Should Not Change", "johndoe@example.com", 57]] } macro(:perform_import) do |*opts| Topic.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: { constraint_name: :topics_pkey, columns: update_columns }, validate: false) end setup do Topic.import columns, values, validate: false @topic = Topic.find 100 end let(:update_columns) { [:title, :author_email_address, :parent_id] } should_support_on_duplicate_key_update should_update_fields_mentioned end context "default to the primary key" do let(:columns) { %w( id title author_name author_email_address parent_id ) } let(:values) { [[100, "Book", "John Doe", "john@doe.com", 17]] } let(:updated_values) { [[100, "Book - 2nd Edition", "Author Should Not Change", "johndoe@example.com", 57]] } let(:update_columns) { [:title, :author_email_address, :parent_id] } setup do Topic.import columns, values, validate: false @topic = Topic.find 100 end context "with no :conflict_target or :constraint_name" do macro(:perform_import) do |*opts| Topic.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: { columns: update_columns }, validate: false) end should_support_on_duplicate_key_update should_update_fields_mentioned end context "with empty value for :conflict_target" do macro(:perform_import) do |*opts| Topic.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: { conflict_target: [], columns: update_columns }, validate: false) end should_support_on_duplicate_key_update should_update_fields_mentioned end context "with empty value for :constraint_name" do macro(:perform_import) do |*opts| 
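# :constraint_name is PostgreSQL-specific: it targets the conflict via
# ON CONFLICT ON CONSTRAINT topics_pkey, as an alternative to listing the
# conflicting columns with :conflict_target.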
Topic.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: { constraint_name: '', columns: update_columns }, validate: false) end should_support_on_duplicate_key_update should_update_fields_mentioned end end context "with no :conflict_target or :constraint_name" do context "with no primary key" do it "raises ArgumentError" do error = assert_raises ArgumentError do Rule.import Build(3, :rules), on_duplicate_key_update: [:condition_text], validate: false end assert_match(/Expected :conflict_target or :constraint_name to be specified/, error.message) end end end context "with no :columns" do let(:columns) { %w( id title author_name author_email_address ) } let(:values) { [[100, "Book", "John Doe", "john@doe.com"]] } let(:updated_values) { [[100, "Title Should Not Change", "Author Should Not Change", "john@nogo.com"]] } macro(:perform_import) do |*opts| Topic.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: { conflict_target: :id }, validate: false) end setup do Topic.import columns, values, validate: false @topic = Topic.find 100 end should_update_updated_at_on_timestamp_columns end end end end end activerecord-import-0.28.2/test/postgresql/0000755000004100000410000000000013424140671021001 5ustar www-datawww-dataactiverecord-import-0.28.2/test/postgresql/import_test.rb0000644000004100000410000000032113424140671023673 0ustar www-datawww-datarequire File.expand_path(File.dirname(__FILE__) + '/../test_helper') require File.expand_path(File.dirname(__FILE__) + '/../support/postgresql/import_examples') should_support_postgresql_import_functionality activerecord-import-0.28.2/test/jdbcsqlite3/0000755000004100000410000000000013424140671021005 5ustar www-datawww-dataactiverecord-import-0.28.2/test/jdbcsqlite3/import_test.rb0000644000004100000410000000031313424140671023700 0ustar www-datawww-datarequire File.expand_path(File.dirname(__FILE__) + '/../test_helper') require File.expand_path(File.dirname(__FILE__) + '/../support/sqlite3/import_examples') should_support_sqlite3_import_functionality activerecord-import-0.28.2/test/value_sets_bytes_parser_test.rb0000644000004100000410000001003213424140671025112 0ustar www-datawww-datarequire File.expand_path(File.dirname(__FILE__) + '/test_helper') require 'activerecord-import/value_sets_parser' describe ActiveRecord::Import::ValueSetsBytesParser do context "#parse - computing insert value sets" do let(:parser) { ActiveRecord::Import::ValueSetsBytesParser } let(:base_sql) { "INSERT INTO atable (a,b,c)" } let(:values) { ["(1,2,3)", "(2,3,4)", "(3,4,5)"] } context "when the max allowed bytes is 30 and the base SQL is 26 bytes" do it "should raise ActiveRecord::Import::ValueSetTooLargeError" do error = assert_raises ActiveRecord::Import::ValueSetTooLargeError do parser.parse values, reserved_bytes: base_sql.size, max_bytes: 30 end assert_match(/33 bytes exceeds the max allowed for an insert \[30\]/, error.message) end end context "when the max allowed bytes is 33 and the base SQL is 26 bytes" do it "should return 3 value sets when given 3 value sets of 7 bytes a piece" do value_sets = parser.parse values, reserved_bytes: base_sql.size, max_bytes: 33 assert_equal 3, value_sets.size end end context "when the max allowed bytes is 40 and the base SQL is 26 bytes" do it "should return 3 value sets when given 3 value sets of 7 bytes a piece" do value_sets = parser.parse values, reserved_bytes: base_sql.size, max_bytes: 40 assert_equal 3, value_sets.size end end context "when the max allowed bytes is 
41 and the base SQL is 26 bytes" do it "should return 2 value sets when given 2 value sets of 7 bytes a piece" do value_sets = parser.parse values, reserved_bytes: base_sql.size, max_bytes: 41 assert_equal 2, value_sets.size end end context "when the max allowed bytes is 48 and the base SQL is 26 bytes" do it "should return 2 value sets when given 2 value sets of 7 bytes a piece" do value_sets = parser.parse values, reserved_bytes: base_sql.size, max_bytes: 48 assert_equal 2, value_sets.size end end context "when the max allowed bytes is 49 and the base SQL is 26 bytes" do it "should return 1 value sets when given 1 value sets of 7 bytes a piece" do value_sets = parser.parse values, reserved_bytes: base_sql.size, max_bytes: 49 assert_equal 1, value_sets.size end end context "when the max allowed bytes is 999999 and the base SQL is 26 bytes" do it "should return 1 value sets when given 1 value sets of 7 bytes a piece" do value_sets = parser.parse values, reserved_bytes: base_sql.size, max_bytes: 999_999 assert_equal 1, value_sets.size end end it "should properly build insert value set based on max packet allowed" do values = [ "('1','2','3')", "('4','5','6')", "('7','8','9')" ] base_sql_size_in_bytes = 15 max_bytes = 30 value_sets = parser.parse values, reserved_bytes: base_sql_size_in_bytes, max_bytes: max_bytes assert_equal 3, value_sets.size, 'Three value sets were expected!' # Each element in the value_sets array must be an array value_sets.each_with_index do |e, i| assert_kind_of Array, e, "Element #{i} was expected to be an Array!" end # Each element in the values array should have a 1:1 correlation to the elements # in the returned value_sets arrays assert_equal values[0], value_sets[0].first assert_equal values[1], value_sets[1].first assert_equal values[2], value_sets[2].first end context "data contains multi-byte chars" do it "should properly build insert value set based on max packet allowed" do # each accented e should be 2 bytes, so each entry is 6 bytes instead of 5 values = [ "('é')", "('é')" ] base_sql_size_in_bytes = 15 max_bytes = 26 value_sets = parser.parse values, reserved_bytes: base_sql_size_in_bytes, max_bytes: max_bytes assert_equal 2, value_sets.size, 'Two value sets were expected!' 
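# The split is byte-oriented (String#bytesize), not character-oriented:
# each "('é')" entry is 6 bytes because the accented e is 2 bytes, so a
# single value set holding both entries would exceed max_bytes = 26 once
# the 15 reserved bytes are counted, and two single-entry sets result.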
end end end end activerecord-import-0.28.2/test/test_helper.rb0000644000004100000410000000415213424140671021443 0ustar www-datawww-datarequire 'pathname' test_dir = Pathname.new File.dirname(__FILE__) $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib')) $LOAD_PATH.unshift(File.dirname(__FILE__)) require "fileutils" ENV["RAILS_ENV"] = "test" require "bundler" Bundler.setup require 'pry' unless RbConfig::CONFIG["RUBY_INSTALL_NAME"] =~ /jruby/ require "active_record" require "active_record/fixtures" require "active_support/test_case" if ActiveSupport::VERSION::STRING < "4.0" require 'test/unit' require 'mocha/test_unit' else require 'active_support/testing/autorun' require "mocha/mini_test" end require 'timecop' require 'chronic' begin require 'composite_primary_keys' rescue LoadError ENV["SKIP_COMPOSITE_PK"] = "true" end # Support MySQL 5.7 if ActiveSupport::VERSION::STRING < "4.1" require "active_record/connection_adapters/mysql2_adapter" class ActiveRecord::ConnectionAdapters::Mysql2Adapter NATIVE_DATABASE_TYPES[:primary_key] = "int(11) auto_increment PRIMARY KEY" end end require "ruby-debug" if RUBY_VERSION.to_f < 1.9 adapter = ENV["ARE_DB"] || "sqlite3" FileUtils.mkdir_p 'log' ActiveRecord::Base.logger = Logger.new("log/test.log") ActiveRecord::Base.logger.level = Logger::DEBUG ActiveRecord::Base.configurations["test"] = YAML.load_file(test_dir.join("database.yml"))[adapter] ActiveRecord::Base.default_timezone = :utc require "activerecord-import" ActiveRecord::Base.establish_connection :test ActiveSupport::Notifications.subscribe(/active_record.sql/) do |_, _, _, _, hsh| ActiveRecord::Base.logger.info hsh[:sql] end require "factory_bot" Dir[File.dirname(__FILE__) + "/support/**/*.rb"].each { |file| require file } # Load base/generic schema require test_dir.join("schema/version") require test_dir.join("schema/generic_schema") adapter_schema = test_dir.join("schema/#{adapter}_schema.rb") require adapter_schema if File.exist?(adapter_schema) Dir[File.dirname(__FILE__) + "/models/*.rb"].each { |file| require file } # Prevent this deprecation warning from breaking the tests. Rake::FileList.send(:remove_method, :import) ActiveSupport::TestCase.test_order = :random if ENV['AR_VERSION'].to_f >= 4.2 activerecord-import-0.28.2/activerecord-import.gemspec0000644000004100000410000000162313424140671023150 0ustar www-datawww-data# -*- encoding: utf-8 -*- require File.expand_path('../lib/activerecord-import/version', __FILE__) Gem::Specification.new do |gem| gem.authors = ["Zach Dennis"] gem.email = ["zach.dennis@gmail.com"] gem.summary = "Bulk insert extension for ActiveRecord" gem.description = "A library for bulk inserting data using ActiveRecord." 
gem.homepage = "http://github.com/zdennis/activerecord-import" gem.license = "Ruby" gem.files = `git ls-files`.split($\) gem.executables = gem.files.grep(%r{^bin/}).map { |f| File.basename(f) } gem.test_files = gem.files.grep(%r{^(test|spec|features)/}) gem.name = "activerecord-import" gem.require_paths = ["lib"] gem.version = ActiveRecord::Import::VERSION gem.required_ruby_version = ">= 1.9.2" gem.add_runtime_dependency "activerecord", ">= 3.2" gem.add_development_dependency "rake" end activerecord-import-0.28.2/gemfiles/0000755000004100000410000000000013424140671017412 5ustar www-datawww-dataactiverecord-import-0.28.2/gemfiles/4.0.gemfile0000644000004100000410000000010613424140671021242 0ustar www-datawww-datagem 'activerecord', '~> 4.0.0' gem 'composite_primary_keys', '~> 6.0' activerecord-import-0.28.2/gemfiles/5.0.gemfile0000644000004100000410000000010613424140671021243 0ustar www-datawww-datagem 'activerecord', '~> 5.0.0' gem 'composite_primary_keys', '~> 9.0' activerecord-import-0.28.2/gemfiles/4.1.gemfile0000644000004100000410000000010613424140671021243 0ustar www-datawww-datagem 'activerecord', '~> 4.1.0' gem 'composite_primary_keys', '~> 7.0' activerecord-import-0.28.2/gemfiles/4.2.gemfile0000644000004100000410000000010613424140671021244 0ustar www-datawww-datagem 'activerecord', '~> 4.2.0' gem 'composite_primary_keys', '~> 8.0' activerecord-import-0.28.2/gemfiles/5.1.gemfile0000644000004100000410000000010713424140671021245 0ustar www-datawww-datagem 'activerecord', '~> 5.1.0' gem 'composite_primary_keys', '~> 10.0' activerecord-import-0.28.2/gemfiles/5.2.gemfile0000644000004100000410000000010713424140671021246 0ustar www-datawww-datagem 'activerecord', '~> 5.2.0' gem 'composite_primary_keys', '~> 11.0' activerecord-import-0.28.2/gemfiles/3.2.gemfile0000644000004100000410000000010613424140671021243 0ustar www-datawww-datagem 'activerecord', '~> 3.2.0' gem 'composite_primary_keys', '~> 5.0' activerecord-import-0.28.2/CHANGELOG.md0000644000004100000410000003072213424140671017434 0ustar www-datawww-data## Changes in 0.28.2 ### Fixes * Fix issue where validations were not working in certain scenarios. Thanks to @CASIXx1 via \#579. ## Changes in 0.28.1 ### Fixes * Fix issue where ActiveRecord presence validations were being mutated. Limited custom presence validation to bulk imports. ## Changes in 0.28.0 ### New Features * Allow updated timestamps to be manually set. Thanks to @Rob117, @jkowens via \#570. ### Fixes * Fix validating presence of belongs_to associations. Existence of the parent record is not validated, but the foreign key field cannot be empty. Thanks to @Rob117, @jkowens via \#575. ## Changes in 0.27.0 ### New Features * Add "secret" option validate_uniqueness to enable uniqueness validators when validating import. This is not a recommended approach (See #228), but is being added back in for projects that depended on this feature. Thanks to @jkowens via \#554. ## Changes in 0.26.0 ### New Features * Add on_duplicate_key_update for SQLite. Thanks to @jkowens via \#542. * Add option to update all fields on_duplicate_key_update. Thanks to @aimerald, @jkowens via \#543. ### Fixes * Handle deeply frozen options hashes. Thanks to @jturkel via \#546. * Switch from FactoryGirl to FactoryBot. Thanks to @koic via \#547. * Allow import to work with ProxySQL. Thanks to @GregFarrell via \#550. ## Changes in 0.25.0 ### New Features * Add support for makara_postgis adapter. Thanks to @chadwilken via \#527. * Skip validating presence of belongs_to associations. 
Thanks to @Sohair63, @naiyt, @jkowens via \#528. ### Fixes * Add missing require for ActiveSupport.on_load. Thanks to @onk via \#529. * Support setting attribute values in before_validation callbacks. Thanks to @SirRawlins, @jkowens via \#531. * Ignore virtual columns. Thanks to @dbourguignon, @jkowens via \#530. ## Changes in 0.24.0 ### Fixes * Use the association primary key when importing. Thanks to @dpogue via \#512. * Allow association ids to be updated. Thanks to @Aristat via \#515. ## Changes in 0.23.0 ### New Features * Rename `import` method to `bulk_import` and alias to `import`. Thanks to @itay-grudev, @jkowens via \#498. * Increment lock_version on duplicate key update. Thanks to @aimerald via \#500. ### Fixes * Fix import_without_validations_or_callbacks exception if array is empty. Thanks to @doloopwhile via \#508. ## Changes in 0.22.0 ### New Features * Add support for importing hashes thru a has many association. Thanks to @jkowens via \#483. ### Fixes * Fix validation logic for recursive import. For those on Rails 5.0 and 5.1, this change requires models with polymorphic associations to specify the `inverse_of` argument (See issue #495). Thanks to @eric-simonton-sama, @jkowens via \#489. ## Changes in 0.21.0 ### New Features * Allow SQL subqueries (objects that respond to .to_sql) to be passed as values. Thanks to @jalada, @jkowens via \#471 * Raise an ArgumentError when importing an array of hashes if any of the hash objects have different keys. Thanks to @mbell697 via \#465. ### Fixes * Fix issue loading incorrect foreign key value when syncing belongs_to associations with custom foreign key columns. Thanks to @marcgreenstock, @jkowens via \#470. * Fix issue importing models with polymorphic belongs_to associations. Thanks to @zorab47, @jkowens via \#476. * Fix issue importing STI models with ActiveRecord 4.0. Thanks to @kazuki-st, @jkowens via \#478. ## Changes in 0.20.2 ### Fixes * Unscope model when synchronizing with database. Thanks to @indigoviolet via \#455. ## Changes in 0.20.1 ### Fixes * Prevent :on_duplicate_key_update args from being modified. Thanks to @joshuamcginnis, @jkowens via \#451. ## Changes in 0.20.0 ### New Features * Allow returning columns to be specified for PostgreSQL. Thanks to @tjwp via \#433. ### Fixes * Fixes an issue when bypassing uniqueness validators. Thanks to @vmaxv via \#444. * For AR < 4.2, prevent type casting for binary columns on Postgresql. Thanks to @mwalsher via \#446. * Fix issue logging class name on import. Thanks to @sophylee, @jkowens via \#447. * Copy belongs_to association id to foreign key column before importing. Thanks to @jkowens via \#448. * Reset model instance on validate. Thanks to @vmaxv via \#449. ## Changes in 0.19.1 ### Fixes * Fix a regression where models weren't properly being marked clean. Thanks to @tjwp via \#434. * Raise ActiveRecord::Import::ValueSetTooLargeError when a record being inserted exceeds the `max_allowed_packet` for MySQL. Thanks to @saizai, @jkowens via \#437. * Fix issue concatenating column names array with primary key. Thanks to @keeguon via \#440. ## Changes in 0.19.0 ### New Features * For PostgreSQL, add option to set WHERE condition in conflict_action. Thanks to @Saidbek via \#423. ### Fixes * Fix issue importing saved records with serialized fields. Thanks to @Andreis13, @jkowens via \#425. * Fix issue importing records that have columns defined with default values that are functions or expressions. Thanks to @Andreis13, @jkowens via \#428. 
## Changes in 0.18.3 ### Fixes * Set models' new_record attribute to false when importing with :on_duplicate_key_ignore. Thanks to @nijikon, @jkowens via \#416. ## Changes in 0.18.2 ### Fixes * Enable custom validate callbacks when validating import. Thanks to @afn via \#410. * Prevent wrong IDs being set on models when using :on_duplicate_key_ignore. Thanks to @afn, @jkowens via \#412. ## Changes in 0.18.1 ### Fixes * Fix to enable validation callbacks (before_validation, after_validation). Thanks to @sinsoku, @jkowens via \#406. ## Changes in 0.18.0 ### New Features * Uniqueness validation is bypassed when validating models since it cannot be guaranteed if there are duplicates in a batch. Thanks to @jkowens via \#301. * Allow for custom timestamp columns. Thanks to @mojidabckuu, @jkowens via \#401. ### Fixes * Fix ActiveRecord 5 issue coercing boolean values when serializing for the database. Thanks to @rjrobinson, @jkowens via \#403. ## Changes in 0.17.2 ### Fixes * Fix issue where PostgreSQL cannot recognize columns if names include mixed case characters. Thanks to @hugobgranja via \#379. * Fix an issue for ActiveRecord 5 where serialized fields with default values were not being typecast. Thanks to @whistlerbrk, @jkowens via \#386. * Add option :force_single_insert for MySQL to make sure a single insert is attempted instead of performing multiple inserts based on max_allowed_packet. Thanks to @mtparet via \#387. ## Changes in 0.17.1 ### Fixes * Along with setting id on models for adapters that support it, add created_at and updated_at timestamps. Thanks to @jacob-carlborg via \#364. * Properly set returned ids when using composite_primary_keys. Thanks to @guigs, @jkowens via \#371. ## Changes in 0.17.0 ### New Features * Add support for composite_primary_keys gem. Thanks to @jkowens via \#350. * Add support for importing an array of hashes. Thanks to @jkowens via \#352. * Add JDBC SQLite3 support. Thanks to @jkowens via \#356. ### Fixes * Remove support for SQLite recursive imports. See \#351. * Improve import speed for Rails 5. Thanks to @ranchodeluxe, @jkowens via \#359. ## Changes in 0.16.2 ### Fixes * Fixes issue clearing query cache on wrong connection when using multiple databases. Thanks to @KentoMoriwaki via \#337 * Raises an ArgumentError on incorrect usage of nested arrays. Thanks to @Nitrodist via \#340 * Fixes issue that prevented uuid primary keys from being set manually. Thanks to @Dclusin-og, @jkowens via \#342 ## Changes in 0.16.1 ### Fixes * Fixes issue with missing error messages on failed instances when importing using arrays of columns and values. Thanks to @Fivell via \#332 * Update so SQLite only returns ids if table has a primary key field via \#333 ## Changes in 0.16.0 ### New Features * Add partial index upsert support for PostgreSQL. Thanks to @luislew via \#305 * Add UUID primary key support for PostgreSQL. Thanks to @jkowens via \#312 * Add store accessor support for JSON, JSONB, and HSTORE data types. Thanks to @jkowens via \#322 * Log warning if database does not support :on_duplicate_key_update. Thanks to @jkowens via \#324 * Add option :on_duplicate_key_ignore for MySQL and SQLite. Thanks to @jkowens via \#326 ### Fixes * Fixes issue with recursive import using same primary key for all models. Thanks to @chopraanmol1 via \#309 * Fixes issue importing from STI subclass with polymorphic associations. Thanks to @JNajera via \#314 * Fixes issue setting returned IDs to wrong models when some fail validation. Also fixes issue with SQLite returning wrong IDs.
Thanks to @mizukami234 via \#315 ## Changes in 0.15.0 ### New Features * An ArgumentError is now raised when no `conflict_target` or `conflict_name` is provided or can be determined when using the `on_duplicate_key_update` option for PostgreSQL. Thanks to @jkowens via \#290 * Support for Rails 5.0 final release for all except the JDBC driver, which is not yet updated to support Rails 5.0 ### Fixes * activerecord-import no longer modifies a value array inside of the given values array when called with `import(columns, values)`. Thanks to @jkowens via \#291 ### Misc * `raise_error` is used to raise errors for ActiveRecord 5.0; `raise_record_invalid` has been removed. Thanks to @couragecourag via \#294 ## Changes in 0.14.1 ### Fixes * JRuby/JDBCDriver with PostgreSQL will no longer raise a JDBCDriver error when using the :no_returning boolean option. Thanks to @jkowens via \#287 ## Changes in 0.14.0 ### New Features * Support for ActiveRecord 3.1 has been dropped. Thanks to @sferik via \#254 * SQLite3 has learned the :recursive option. Thanks to @jkowens via \#281 * :on_duplicate_key_ignore will be ignored when imports are being done with :recursive. Thanks to @jkowens via \#268 * activerecord-import learned how to tell PostgreSQL to return no data back from the import via the :no_returning boolean option. Thanks to @makaroni4 via \#276 ### Fixes * Polymorphic associations will not import the :type column. Thanks to @seanlinsley via \#282 and \#283 * ~2X speed increase for importing models with validations. Thanks to @jkowens via \#266 ### Misc * Benchmark HTML report has been fixed. Thanks to @jkowens via \#264 * seamless_database_pool has been updated to work with AR 5.0. Thanks to @jkowens via \#280 * Code cleanup, removal of redundant condition checks. Thanks to @pavlik4k via \#273 * Code cleanup, removal of deprecated `alias_method_chain`. Thanks to @codeodor via \#271 ## Changes in 0.13.0 ### New Features * Addition of :batch_size option to control the number of rows to insert per INSERT statement. The default is the total number of records being inserted so there is a single INSERT statement. Thanks to @jkowens via \#245 * Addition of `import!`, which will raise an exception if a validation error occurs. It will fail fast. Thanks to @jkowens via \#246 ### Fixes * Fixing issue with recursive import when utilizing the `:on_duplicate_key_update` option. The `on_duplicate_key_update` only applies to parent models at this time. Thanks to @yuri-karpovich for reporting and @jkowens for fixing via \#249 ### Misc * Refactoring of fetching and assigning attributes. Thanks to @jkownes via \#259 * Lots of code cleanup and addition of Rubocop linter. Thanks to @sferik via \#256 and \#250 * Resolving errors with the test suite when running against ActiveRecord 4.0 and 4.1. Thanks to @jkowens via \#262 * Cleaning up the TravisCI settings and packages. Thanks to @sferik via \#258 and \#251 ## Changes in 0.12.0 ### New Features * PostgreSQL UPSERT support has been added. Thanks @jkowens via \#218 ### Fixes * has_one and has_many associations will now be recursively imported regardless of :autosave being set. Thanks @sferik, @jkowens via \#243, \#234 * Fixing an issue with enum column support for Rails > 4.1. Thanks @aquajach via \#235 ### Removals * Support for em-synchrony has been removed since it appears the project has been abandoned. Thanks @sferik, @zdennis via \#239 * Support for the mysql gem/adapter has been removed since it has officially been abandoned. Use the mysql2 gem/adapter instead.
Thanks @sferik, @zdennis via \#239 ### Misc * Cleaned up TravisCI output and removed deprecation warnings. Thanks @jkowens, @zdennis \#242 ## Changes before 0.12.0 > Never look back. What's gone is now history. But in the process make memory of events to help you understand what will help you to make your dream a true story. Mistakes of the past are lessons, success of the past is inspiration. – Dr. Anil Kr Sinha activerecord-import-0.28.2/.rubocop.yml0000644000004100000410000000132513424140671020072 0ustar www-datawww-datainherit_from: .rubocop_todo.yml Lint/EndAlignment: AlignWith: variable Metrics/AbcSize: Enabled: false Metrics/ClassLength: Enabled: false Metrics/CyclomaticComplexity: Enabled: false Metrics/LineLength: Enabled: false Metrics/MethodLength: Enabled: false Metrics/ModuleLength: Enabled: false Metrics/PerceivedComplexity: Enabled: false Style/AlignParameters: EnforcedStyle: with_fixed_indentation Style/ClassAndModuleChildren: Enabled: false Style/Documentation: Enabled: false Style/ElseAlignment: Enabled: false Style/SpaceInsideParens: Enabled: false Style/SpecialGlobalVars: Enabled: false Style/StringLiterals: Enabled: false Style/TrailingCommaInLiteral: Enabled: false activerecord-import-0.28.2/.gitignore0000644000004100000410000000035613424140671017613 0ustar www-datawww-data## MAC OS .DS_Store ## TEXTMATE *.tmproj tmtags ## EMACS *~ \#* .\#* ## VIM *.swp ## PROJECT::GENERAL coverage rdoc pkg *.gem *.lock ## PROJECT::SPECIFIC log/*.log test.db test/database.yml .ruby-* .bundle/ .redcar/ .rvmrc docsite/ activerecord-import-0.28.2/LICENSE0000644000004100000410000000471013424140671016626 0ustar www-datawww-dataRuby is copyrighted free software by Yukihiro Matsumoto . You can redistribute it and/or modify it under either the terms of the 2-clause BSDL (see the file BSDL), or the conditions below: 1. You may make and give away verbatim copies of the source form of the software without restriction, provided that you duplicate all of the original copyright notices and associated disclaimers. 2. You may modify your copy of the software in any way, provided that you do at least ONE of the following: a) place your modifications in the Public Domain or otherwise make them Freely Available, such as by posting said modifications to Usenet or an equivalent medium, or by allowing the author to include your modifications in the software. b) use the modified software only within your corporation or organization. c) give non-standard binaries non-standard names, with instructions on where to get the original software distribution. d) make other distribution arrangements with the author. 3. You may distribute the software in object code or binary form, provided that you do at least ONE of the following: a) distribute the binaries and library files of the software, together with instructions (in the manual page or equivalent) on where to get the original distribution. b) accompany the distribution with the machine-readable source of the software. c) give non-standard binaries non-standard names, with instructions on where to get the original software distribution. d) make other distribution arrangements with the author. 4. You may modify and include the part of the software into any other software (possibly commercial). But some files in the distribution are not written by the author, so that they are not under these terms. For the list of those files and their copying conditions, see the file LEGAL. 5.
The scripts and library files supplied as input to or produced as output from the software do not automatically fall under the copyright of the software, but belong to whomever generated them, and may be sold commercially, and may be aggregated with this software. 6. THIS SOFTWARE IS PROVIDED "AS IS" AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. activerecord-import-0.28.2/.rubocop_todo.yml0000644000004100000410000000220113424140671021111 0ustar www-datawww-data# This configuration was generated by # `rubocop --auto-gen-config` # on 2016-03-17 18:14:55 -0700 using RuboCop version 0.38.0. # The point is for the user to remove these configuration records # one by one as the offenses are removed from the code base. # Note that changes in the inspected code, or installation of new # versions of RuboCop, may require this file to be generated again. # Offense count: 2 Lint/HandleExceptions: Exclude: - 'lib/activerecord-import/base.rb' - 'test/import_test.rb' # Offense count: 2 Lint/RescueException: Exclude: - 'benchmarks/lib/cli_parser.rb' - 'test/import_test.rb' # Offense count: 4 # Cop supports --auto-correct. # Configuration parameters: AllowUnusedKeywordArguments, IgnoreEmptyMethods. Lint/UnusedMethodArgument: Exclude: - 'lib/activerecord-import/adapters/postgresql_adapter.rb' - 'lib/activerecord-import/import.rb' # Offense count: 2 # Cop supports --auto-correct. # Configuration parameters: Keywords. # Keywords: TODO, FIXME, OPTIMIZE, HACK, REVIEW Style/CommentAnnotation: Exclude: - 'benchmarks/lib/cli_parser.rb' - 'lib/activerecord-import/import.rb' activerecord-import-0.28.2/Rakefile0000644000004100000410000000305113424140671017263 0ustar www-datawww-datarequire "bundler" Bundler.setup require 'rake' require 'rake/testtask' namespace :display do task :notice do puts puts "To run tests you must supply the adapter, see rake -T for more information." puts end end task default: ["display:notice"] ADAPTERS = %w( mysql2 mysql2_makara mysql2spatial jdbcmysql jdbcsqlite3 jdbcpostgresql postgresql postgresql_makara postgis makara_postgis sqlite3 spatialite seamless_database_pool ).freeze ADAPTERS.each do |adapter| namespace :test do desc "Runs #{adapter} database tests." Rake::TestTask.new(adapter) do |t| # FactoryBot has an issue with warnings, so keep them turned off; they are too noisy # t.warning = true t.test_files = FileList["test/adapters/#{adapter}.rb", "test/*_test.rb", "test/active_record/*_test.rb", "test/#{adapter}/**/*_test.rb"] end task adapter end end begin require 'rcov/rcovtask' adapter = ENV['ARE_DB'] Rcov::RcovTask.new do |test| test.libs << 'test' test.pattern = ["test/adapters/#{adapter}.rb", "test/*_test.rb", "test/#{adapter}/**/*_test.rb"] test.verbose = true end rescue LoadError task :rcov do abort "RCov is not available. In order to run rcov, you must: sudo gem install rcov" end end require 'rdoc/task' Rake::RDocTask.new do |rdoc| version = File.exist?('VERSION') ?
File.read('VERSION') : "" rdoc.rdoc_dir = 'rdoc' rdoc.title = "activerecord-import #{version}" rdoc.rdoc_files.include('README*') rdoc.rdoc_files.include('lib/**/*.rb') end require 'rubocop/rake_task' RuboCop::RakeTask.new activerecord-import-0.28.2/lib/0000755000004100000410000000000013424140671016365 5ustar www-datawww-dataactiverecord-import-0.28.2/lib/activerecord-import/0000755000004100000410000000000013424140671022347 5ustar www-datawww-dataactiverecord-import-0.28.2/lib/activerecord-import/version.rb0000644000004100000410000000011413424140671024355 0ustar www-datawww-datamodule ActiveRecord module Import VERSION = "0.28.2".freeze end end activerecord-import-0.28.2/lib/activerecord-import/adapters/0000755000004100000410000000000013424140671024152 5ustar www-datawww-dataactiverecord-import-0.28.2/lib/activerecord-import/adapters/abstract_adapter.rb0000644000004100000410000000375613424140671030015 0ustar www-datawww-datamodule ActiveRecord::Import::AbstractAdapter module InstanceMethods def next_value_for_sequence(sequence_name) %(#{sequence_name}.nextval) end def insert_many( sql, values, _options = {}, *args ) # :nodoc: number_of_inserts = 1 base_sql, post_sql = if sql.is_a?( String ) [sql, ''] elsif sql.is_a?( Array ) [sql.shift, sql.join( ' ' )] end sql2insert = base_sql + values.join( ',' ) + post_sql insert( sql2insert, *args ) ActiveRecord::Import::Result.new([], number_of_inserts, [], []) end def pre_sql_statements(options) sql = [] sql << options[:pre_sql] if options[:pre_sql] sql << options[:command] if options[:command] # add keywords like IGNORE or DELAYED if options[:keywords].is_a?(Array) sql.concat(options[:keywords]) elsif options[:keywords] sql << options[:keywords].to_s end sql end # Synchronizes the passed in ActiveRecord instances with the records in # the database by calling +reload+ on each instance. def after_import_synchronize( instances ) instances.each(&:reload) end # Returns an array of post SQL statements given the passed in options. def post_sql_statements( table_name, options ) # :nodoc: post_sql_statements = [] if supports_on_duplicate_key_update? && options[:on_duplicate_key_update] post_sql_statements << sql_for_on_duplicate_key_update( table_name, options[:on_duplicate_key_update], options[:primary_key], options[:locking_column] ) elsif options[:on_duplicate_key_update] logger.warn "Ignoring on_duplicate_key_update because it is not supported by the database." end # custom user post_sql post_sql_statements << options[:post_sql] if options[:post_sql] # with rollup post_sql_statements << rollup_sql if options[:rollup] post_sql_statements end def supports_on_duplicate_key_update? false end end end activerecord-import-0.28.2/lib/activerecord-import/adapters/mysql_adapter.rb0000644000004100000410000001115713424140671027351 0ustar www-datawww-datamodule ActiveRecord::Import::MysqlAdapter include ActiveRecord::Import::ImportSupport include ActiveRecord::Import::OnDuplicateKeyUpdateSupport NO_MAX_PACKET = 0 QUERY_OVERHEAD = 8 # This was shown to be true for MySQL, but it's not clear where the overhead is from. # +sql+ can be a single string or an array. If it is an array all # elements that are in position >= 1 will be appended to the final SQL. 
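  # A minimal sketch of a call (the table, columns, and pre-escaped value
  # tuples below are hypothetical, not taken from a real caller):
  #
  #   insert_many(
  #     ["INSERT INTO widgets (`name`) VALUES ", " ON DUPLICATE KEY UPDATE widgets.`name`=VALUES(`name`)"],
  #     ["('a')", "('b')"]
  #   )
  #
  # This builds a single statement, "INSERT INTO widgets (`name`) VALUES
  # ('a'),('b') ON DUPLICATE KEY UPDATE ...", when the total byte size fits
  # within max_allowed_packet; otherwise the value tuples are split across
  # several INSERT statements executed inside one transaction.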
def insert_many( sql, values, options = {}, *args ) # :nodoc: # the number of inserts defaults to zero number_of_inserts = 0 base_sql, post_sql = if sql.is_a?( String ) [sql, ''] elsif sql.is_a?( Array ) [sql.shift, sql.join( ' ' )] end sql_size = QUERY_OVERHEAD + base_sql.size + post_sql.size # the number of bytes the requested insert statement values will take up values_in_bytes = values.sum(&:bytesize) # the number of bytes (commas) it will take to comma separate our values comma_separated_bytes = values.size - 1 # the total number of bytes required if this statement is one statement total_bytes = sql_size + values_in_bytes + comma_separated_bytes max = max_allowed_packet # if we can insert it all as one statement if NO_MAX_PACKET == max || total_bytes <= max || options[:force_single_insert] number_of_inserts += 1 sql2insert = base_sql + values.join( ',' ) + post_sql insert( sql2insert, *args ) else value_sets = ::ActiveRecord::Import::ValueSetsBytesParser.parse(values, reserved_bytes: sql_size, max_bytes: max) transaction(requires_new: true) do value_sets.each do |value_set| number_of_inserts += 1 sql2insert = base_sql + value_set.join( ',' ) + post_sql insert( sql2insert, *args ) end end end ActiveRecord::Import::Result.new([], number_of_inserts, [], []) end # Returns the maximum number of bytes that the server will allow # in a single packet def max_allowed_packet # :nodoc: @max_allowed_packet ||= begin result = execute( "SHOW VARIABLES like 'max_allowed_packet'" ) # original Mysql gem responds to #fetch_row while Mysql2 responds to #first val = result.respond_to?(:fetch_row) ? result.fetch_row[1] : result.first[1] val.to_i end end def pre_sql_statements( options ) sql = [] sql << "IGNORE" if options[:ignore] || options[:on_duplicate_key_ignore] sql + super end # Add a column to be updated on duplicate key update def add_column_for_on_duplicate_key_update( column, options = {} ) # :nodoc: if options.include?(:on_duplicate_key_update) columns = options[:on_duplicate_key_update] case columns when Array then columns << column.to_sym unless columns.include?(column.to_sym) when Hash then columns[column.to_sym] = column.to_sym end elsif !options[:ignore] && !options[:on_duplicate_key_ignore] options[:on_duplicate_key_update] = [column.to_sym] end end # Returns a generated ON DUPLICATE KEY UPDATE statement given the passed # in +args+.
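  # As a rough sketch (hypothetical table and columns), an Array argument
  # such as [:name, :price] for table 'widgets' with no locking column
  # yields:
  #
  #   " ON DUPLICATE KEY UPDATE widgets.`name`=VALUES(`name`),widgets.`price`=VALUES(`price`)"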
def sql_for_on_duplicate_key_update( table_name, *args ) # :nodoc: sql = ' ON DUPLICATE KEY UPDATE ' arg = args.first locking_column = args.last if arg.is_a?( Array ) sql << sql_for_on_duplicate_key_update_as_array( table_name, locking_column, arg ) elsif arg.is_a?( Hash ) sql << sql_for_on_duplicate_key_update_as_hash( table_name, locking_column, arg ) elsif arg.is_a?( String ) sql << arg else raise ArgumentError, "Expected Array or Hash" end sql end def sql_for_on_duplicate_key_update_as_array( table_name, locking_column, arr ) # :nodoc: results = arr.map do |column| qc = quote_column_name( column ) "#{table_name}.#{qc}=VALUES(#{qc})" end increment_locking_column!(results, table_name, locking_column) results.join( ',' ) end def sql_for_on_duplicate_key_update_as_hash( table_name, locking_column, hsh ) # :nodoc: results = hsh.map do |column1, column2| qc1 = quote_column_name( column1 ) qc2 = quote_column_name( column2 ) "#{table_name}.#{qc1}=VALUES( #{qc2} )" end increment_locking_column!(results, table_name, locking_column) results.join( ',' ) end # Return true if the statement is a duplicate key record error def duplicate_key_update_error?(exception) # :nodoc: exception.is_a?(ActiveRecord::StatementInvalid) && exception.to_s.include?('Duplicate entry') end def increment_locking_column!(results, table_name, locking_column) if locking_column.present? results << "#{table_name}.`#{locking_column}`=`#{locking_column}`+1" end end end activerecord-import-0.28.2/lib/activerecord-import/adapters/mysql2_adapter.rb0000644000004100000410000000022213424140671027422 0ustar www-datawww-datarequire "activerecord-import/adapters/mysql_adapter" module ActiveRecord::Import::Mysql2Adapter include ActiveRecord::Import::MysqlAdapter end activerecord-import-0.28.2/lib/activerecord-import/adapters/em_mysql2_adapter.rb0000644000004100000410000000022413424140671030105 0ustar www-datawww-datarequire "activerecord-import/adapters/mysql_adapter" module ActiveRecord::Import::EMMysql2Adapter include ActiveRecord::Import::MysqlAdapter end activerecord-import-0.28.2/lib/activerecord-import/adapters/sqlite3_adapter.rb0000644000004100000410000001354013424140671027566 0ustar www-datawww-datamodule ActiveRecord::Import::SQLite3Adapter include ActiveRecord::Import::ImportSupport include ActiveRecord::Import::OnDuplicateKeyUpdateSupport MIN_VERSION_FOR_IMPORT = "3.7.11".freeze MIN_VERSION_FOR_UPSERT = "3.24.0".freeze SQLITE_LIMIT_COMPOUND_SELECT = 500 # Override our conformance to the ActiveRecord::Import::ImportSupport interface # to ensure that we only support import in supported versions of SQLite. # Support for INSERT statements with multiple value sets was introduced in 3.7.11. def supports_import?(current_version = sqlite_version) current_version >= MIN_VERSION_FOR_IMPORT end def supports_on_duplicate_key_update?(current_version = sqlite_version) current_version >= MIN_VERSION_FOR_UPSERT end # +sql+ can be a single string or an array. If it is an array all # elements that are in position >= 1 will be appended to the final SQL.
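  # As a sketch of the batching behavior (the row counts are illustrative):
  # SQLite caps compound statements at SQLITE_LIMIT_COMPOUND_SELECT (500)
  # value sets, so importing 1200 pre-escaped value tuples issues three
  # INSERT statements of 500, 500, and 200 tuples inside one nested
  # transaction.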
def insert_many( sql, values, _options = {}, *args ) # :nodoc: number_of_inserts = 0 base_sql, post_sql = if sql.is_a?( String ) [sql, ''] elsif sql.is_a?( Array ) [sql.shift, sql.join( ' ' )] end value_sets = ::ActiveRecord::Import::ValueSetsRecordsParser.parse(values, max_records: SQLITE_LIMIT_COMPOUND_SELECT) transaction(requires_new: true) do value_sets.each do |value_set| number_of_inserts += 1 sql2insert = base_sql + value_set.join( ',' ) + post_sql insert( sql2insert, *args ) end end ActiveRecord::Import::Result.new([], number_of_inserts, [], []) end def pre_sql_statements( options ) sql = [] # Options :recursive and :on_duplicate_key_ignore are mutually exclusive if !supports_on_duplicate_key_update? && (options[:ignore] || options[:on_duplicate_key_ignore]) sql << "OR IGNORE" end sql + super end def post_sql_statements( table_name, options ) # :nodoc: sql = [] if supports_on_duplicate_key_update? # Options :recursive and :on_duplicate_key_ignore are mutually exclusive if (options[:ignore] || options[:on_duplicate_key_ignore]) && !options[:on_duplicate_key_update] sql << sql_for_on_duplicate_key_ignore( options[:on_duplicate_key_ignore] ) end end sql + super end def next_value_for_sequence(sequence_name) %{nextval('#{sequence_name}')} end # Add a column to be updated on duplicate key update def add_column_for_on_duplicate_key_update( column, options = {} ) # :nodoc: arg = options[:on_duplicate_key_update] if arg.is_a?( Hash ) columns = arg.fetch( :columns ) { arg[:columns] = [] } case columns when Array then columns << column.to_sym unless columns.include?( column.to_sym ) when Hash then columns[column.to_sym] = column.to_sym end elsif arg.is_a?( Array ) arg << column.to_sym unless arg.include?( column.to_sym ) end end # Returns a generated ON CONFLICT DO NOTHING statement given the passed # in +args+. def sql_for_on_duplicate_key_ignore( *args ) # :nodoc: arg = args.first conflict_target = sql_for_conflict_target( arg ) if arg.is_a?( Hash ) " ON CONFLICT #{conflict_target}DO NOTHING" end # Returns a generated ON CONFLICT DO UPDATE statement given the passed # in +args+. def sql_for_on_duplicate_key_update( _table_name, *args ) # :nodoc: arg, primary_key, locking_column = args arg = { columns: arg } if arg.is_a?( Array ) || arg.is_a?( String ) return unless arg.is_a?( Hash ) sql = ' ON CONFLICT ' conflict_target = sql_for_conflict_target( arg ) columns = arg.fetch( :columns, [] ) condition = arg[:condition] if columns.respond_to?( :empty? ) && columns.empty? return sql << "#{conflict_target}DO NOTHING" end conflict_target ||= sql_for_default_conflict_target( primary_key ) unless conflict_target raise ArgumentError, 'Expected :conflict_target to be specified' end sql << "#{conflict_target}DO UPDATE SET " if columns.is_a?( Array ) sql << sql_for_on_duplicate_key_update_as_array( locking_column, columns ) elsif columns.is_a?( Hash ) sql << sql_for_on_duplicate_key_update_as_hash( locking_column, columns ) elsif columns.is_a?( String ) sql << columns else raise ArgumentError, 'Expected :columns to be an Array or Hash' end sql << " WHERE #{condition}" if condition.present? 
sql end def sql_for_on_duplicate_key_update_as_array( locking_column, arr ) # :nodoc: results = arr.map do |column| qc = quote_column_name( column ) "#{qc}=EXCLUDED.#{qc}" end increment_locking_column!(results, locking_column) results.join( ',' ) end def sql_for_on_duplicate_key_update_as_hash( locking_column, hsh ) # :nodoc: results = hsh.map do |column1, column2| qc1 = quote_column_name( column1 ) qc2 = quote_column_name( column2 ) "#{qc1}=EXCLUDED.#{qc2}" end increment_locking_column!(results, locking_column) results.join( ',' ) end def sql_for_conflict_target( args = {} ) conflict_target = args[:conflict_target] index_predicate = args[:index_predicate] if conflict_target.present? '(' << Array( conflict_target ).reject( &:blank? ).join( ', ' ) << ') '.tap do |sql| sql << "WHERE #{index_predicate} " if index_predicate end end end def sql_for_default_conflict_target( primary_key ) conflict_target = Array(primary_key).join(', ') "(#{conflict_target}) " if conflict_target.present? end # Return true if the statement is a duplicate key record error def duplicate_key_update_error?(exception) # :nodoc: exception.is_a?(ActiveRecord::StatementInvalid) && exception.to_s.include?('duplicate key') end def increment_locking_column!(results, locking_column) if locking_column.present? results << "\"#{locking_column}\"=EXCLUDED.\"#{locking_column}\"+1" end end end activerecord-import-0.28.2/lib/activerecord-import/adapters/postgresql_adapter.rb0000644000004100000410000001551313424140671030407 0ustar www-datawww-datamodule ActiveRecord::Import::PostgreSQLAdapter include ActiveRecord::Import::ImportSupport include ActiveRecord::Import::OnDuplicateKeyUpdateSupport MIN_VERSION_FOR_UPSERT = 90_500 def insert_many( sql, values, options = {}, *args ) # :nodoc: number_of_inserts = 1 returned_values = [] ids = [] results = [] base_sql, post_sql = if sql.is_a?( String ) [sql, ''] elsif sql.is_a?( Array ) [sql.shift, sql.join( ' ' )] end sql2insert = base_sql + values.join( ',' ) + post_sql columns = returning_columns(options) if columns.blank? || options[:no_returning] insert( sql2insert, *args ) else returned_values = if columns.size > 1 # Select composite columns select_rows( sql2insert, *args ) else select_values( sql2insert, *args ) end query_cache.clear if query_cache_enabled end if options[:returning].blank? ids = returned_values elsif options[:primary_key].blank? results = returned_values else # split primary key and returning columns ids, results = split_ids_and_results(returned_values, columns, options) end ActiveRecord::Import::Result.new([], number_of_inserts, ids, results) end def split_ids_and_results(values, columns, options) ids = [] results = [] id_indexes = Array(options[:primary_key]).map { |key| columns.index(key) } returning_indexes = Array(options[:returning]).map { |key| columns.index(key) } values.each do |value| value_array = Array(value) ids << id_indexes.map { |i| value_array[i] } results << returning_indexes.map { |i| value_array[i] } end ids.map!(&:first) if id_indexes.size == 1 results.map!(&:first) if returning_indexes.size == 1 [ids, results] end def next_value_for_sequence(sequence_name) %{nextval('#{sequence_name}')} end def post_sql_statements( table_name, options ) # :nodoc: sql = [] if supports_on_duplicate_key_update? 
# Options :recursive and :on_duplicate_key_ignore are mutually exclusive if (options[:ignore] || options[:on_duplicate_key_ignore]) && !options[:on_duplicate_key_update] && !options[:recursive] sql << sql_for_on_duplicate_key_ignore( table_name, options[:on_duplicate_key_ignore] ) end elsif options[:on_duplicate_key_ignore] && !options[:on_duplicate_key_update] logger.warn "Ignoring on_duplicate_key_ignore because it is not supported by the database." end sql += super(table_name, options) columns = returning_columns(options) unless columns.blank? || options[:no_returning] sql << " RETURNING \"#{columns.join('", "')}\"" end sql end def returning_columns(options) columns = [] columns += Array(options[:primary_key]) if options[:primary_key].present? columns |= Array(options[:returning]) if options[:returning].present? columns end # Add a column to be updated on duplicate key update def add_column_for_on_duplicate_key_update( column, options = {} ) # :nodoc: arg = options[:on_duplicate_key_update] if arg.is_a?( Hash ) columns = arg.fetch( :columns ) { arg[:columns] = [] } case columns when Array then columns << column.to_sym unless columns.include?( column.to_sym ) when Hash then columns[column.to_sym] = column.to_sym end elsif arg.is_a?( Array ) arg << column.to_sym unless arg.include?( column.to_sym ) end end # Returns a generated ON CONFLICT DO NOTHING statement given the passed # in +args+. def sql_for_on_duplicate_key_ignore( table_name, *args ) # :nodoc: arg = args.first conflict_target = sql_for_conflict_target( arg ) if arg.is_a?( Hash ) " ON CONFLICT #{conflict_target}DO NOTHING" end # Returns a generated ON CONFLICT DO UPDATE statement given the passed # in +args+. def sql_for_on_duplicate_key_update( table_name, *args ) # :nodoc: arg, primary_key, locking_column = args arg = { columns: arg } if arg.is_a?( Array ) || arg.is_a?( String ) return unless arg.is_a?( Hash ) sql = ' ON CONFLICT ' conflict_target = sql_for_conflict_target( arg ) columns = arg.fetch( :columns, [] ) condition = arg[:condition] if columns.respond_to?( :empty? ) && columns.empty? return sql << "#{conflict_target}DO NOTHING" end conflict_target ||= sql_for_default_conflict_target( table_name, primary_key ) unless conflict_target raise ArgumentError, 'Expected :conflict_target or :constraint_name to be specified' end sql << "#{conflict_target}DO UPDATE SET " if columns.is_a?( Array ) sql << sql_for_on_duplicate_key_update_as_array( table_name, locking_column, columns ) elsif columns.is_a?( Hash ) sql << sql_for_on_duplicate_key_update_as_hash( table_name, locking_column, columns ) elsif columns.is_a?( String ) sql << columns else raise ArgumentError, 'Expected :columns to be an Array or Hash' end sql << " WHERE #{condition}" if condition.present? sql end def sql_for_on_duplicate_key_update_as_array( table_name, locking_column, arr ) # :nodoc: results = arr.map do |column| qc = quote_column_name( column ) "#{qc}=EXCLUDED.#{qc}" end increment_locking_column!(results, locking_column) results.join( ',' ) end def sql_for_on_duplicate_key_update_as_hash( table_name, locking_column, hsh ) # :nodoc: results = hsh.map do |column1, column2| qc1 = quote_column_name( column1 ) qc2 = quote_column_name( column2 ) "#{qc1}=EXCLUDED.#{qc2}" end increment_locking_column!(results, locking_column) results.join( ',' ) end def sql_for_conflict_target( args = {} ) constraint_name = args[:constraint_name] conflict_target = args[:conflict_target] index_predicate = args[:index_predicate] if constraint_name.present? 
"ON CONSTRAINT #{constraint_name} " elsif conflict_target.present? '(' << Array( conflict_target ).reject( &:blank? ).join( ', ' ) << ') '.tap do |sql| sql << "WHERE #{index_predicate} " if index_predicate end end end def sql_for_default_conflict_target( table_name, primary_key ) conflict_target = Array(primary_key).join(', ') "(#{conflict_target}) " if conflict_target.present? end # Return true if the statement is a duplicate key record error def duplicate_key_update_error?(exception) # :nodoc: exception.is_a?(ActiveRecord::StatementInvalid) && exception.to_s.include?('duplicate key') end def supports_on_duplicate_key_update?(current_version = postgresql_version) current_version >= MIN_VERSION_FOR_UPSERT end def supports_setting_primary_key_of_imported_objects? true end def increment_locking_column!(results, locking_column) if locking_column.present? results << "\"#{locking_column}\"=EXCLUDED.\"#{locking_column}\"+1" end end end activerecord-import-0.28.2/lib/activerecord-import/base.rb0000644000004100000410000000213113424140671023603 0ustar www-datawww-datarequire "pathname" require "active_record" require "active_record/version" module ActiveRecord::Import ADAPTER_PATH = "activerecord-import/active_record/adapters".freeze def self.base_adapter(adapter) case adapter when 'mysql2_makara' then 'mysql2' when 'mysql2spatial' then 'mysql2' when 'spatialite' then 'sqlite3' when 'postgresql_makara' then 'postgresql' when 'makara_postgis' then 'postgresql' when 'postgis' then 'postgresql' else adapter end end # Loads the import functionality for a specific database adapter def self.require_adapter(adapter) require File.join(ADAPTER_PATH, "/#{base_adapter(adapter)}_adapter") rescue LoadError # fallback end # Loads the import functionality for the passed in ActiveRecord connection def self.load_from_connection_pool(connection_pool) require_adapter connection_pool.spec.config[:adapter] end end require 'activerecord-import/import' require 'activerecord-import/active_record/adapters/abstract_adapter' require 'activerecord-import/synchronize' require 'activerecord-import/value_sets_parser' activerecord-import-0.28.2/lib/activerecord-import/import.rb0000644000004100000410000012573513424140671024223 0ustar www-datawww-datarequire "ostruct" module ActiveRecord::Import::ConnectionAdapters; end module ActiveRecord::Import #:nodoc: Result = Struct.new(:failed_instances, :num_inserts, :ids, :results) module ImportSupport #:nodoc: def supports_import? #:nodoc: true end end module OnDuplicateKeyUpdateSupport #:nodoc: def supports_on_duplicate_key_update? 
#:nodoc: true end end class MissingColumnError < StandardError def initialize(name, index) super "Missing column for value <#{name}> at index #{index}" end end class Validator def initialize(klass, options = {}) @options = options init_validations(klass) end def init_validations(klass) @validate_callbacks = klass._validate_callbacks.dup @validate_callbacks.each_with_index do |callback, i| filter = callback.raw_filter next unless filter.class.name =~ /Validations::PresenceValidator/ || (!@options[:validate_uniqueness] && filter.is_a?(ActiveRecord::Validations::UniquenessValidator)) callback = callback.dup filter = filter.dup attrs = filter.instance_variable_get(:@attributes).dup if filter.is_a?(ActiveRecord::Validations::UniquenessValidator) attrs = [] else associations = klass.reflect_on_all_associations(:belongs_to) associations.each do |assoc| if (index = attrs.index(assoc.name)) key = assoc.foreign_key.to_sym attrs[index] = key unless attrs.include?(key) end end end filter.instance_variable_set(:@attributes, attrs) if @validate_callbacks.respond_to?(:chain, true) @validate_callbacks.send(:chain).tap do |chain| callback.instance_variable_set(:@filter, filter) chain[i] = callback end else callback.raw_filter = filter callback.filter = callback.send(:_compile_filter, filter) @validate_callbacks[i] = callback end end end def valid_model?(model) validation_context = @options[:validate_with_context] validation_context ||= (model.new_record? ? :create : :update) current_context = model.send(:validation_context) begin model.send(:validation_context=, validation_context) model.errors.clear model.run_callbacks(:validation) do if defined?(ActiveSupport::Callbacks::Filters::Environment) # ActiveRecord >= 4.1 runner = @validate_callbacks.compile env = ActiveSupport::Callbacks::Filters::Environment.new(model, false, nil) if runner.respond_to?(:call) # ActiveRecord < 5.1 runner.call(env) else # ActiveRecord 5.1 # Note that this is a gross simplification of ActiveSupport::Callbacks#run_callbacks. # It's technically possible for there to exist an "around" callback in the # :validate chain, but this would be an aberration, since Rails doesn't define # "around_validate". Still, rather than silently ignoring such callbacks, we # explicitly raise a RuntimeError, since activerecord-import was asked to perform # validations and it's unable to do so. # # The alternative here would be to copy-and-paste the bulk of the # ActiveSupport::Callbacks#run_callbacks method, which is undesirable if there's # no real-world use case for it. raise "The :validate callback chain contains an 'around' callback, which is unsupported" unless runner.final? runner.invoke_before(env) runner.invoke_after(env) end elsif @validate_callbacks.method(:compile).arity == 0 # ActiveRecord = 4.0 model.instance_eval @validate_callbacks.compile else # ActiveRecord 3.x model.instance_eval @validate_callbacks.compile(nil, model) end end model.errors.empty? ensure model.send(:validation_context=, current_context) end end end end class ActiveRecord::Associations::CollectionProxy def bulk_import(*args, &block) @association.bulk_import(*args, &block) end alias import bulk_import unless respond_to? :import end class ActiveRecord::Associations::CollectionAssociation def bulk_import(*args, &block) unless owner.persisted? raise ActiveRecord::RecordNotSaved, "You cannot call import unless the parent is saved" end options = args.last.is_a?(Hash) ? 
args.pop : {} model_klass = reflection.klass symbolized_foreign_key = reflection.foreign_key.to_sym symbolized_column_names = if model_klass.connection.respond_to?(:supports_virtual_columns?) && model_klass.connection.supports_virtual_columns? model_klass.columns.reject(&:virtual?).map { |c| c.name.to_sym } else model_klass.column_names.map(&:to_sym) end owner_primary_key = reflection.active_record_primary_key.to_sym owner_primary_key_value = owner.send(owner_primary_key) # assume array of model objects if args.last.is_a?( Array ) && args.last.first.is_a?(ActiveRecord::Base) if args.length == 2 models = args.last column_names = args.first.dup else models = args.first column_names = symbolized_column_names end unless symbolized_column_names.include?(symbolized_foreign_key) column_names << symbolized_foreign_key end models.each do |m| m.public_send "#{symbolized_foreign_key}=", owner_primary_key_value m.public_send "#{reflection.type}=", owner.class.name if reflection.type end return model_klass.bulk_import column_names, models, options # supports array of hash objects elsif args.last.is_a?( Array ) && args.last.first.is_a?(Hash) if args.length == 2 array_of_hashes = args.last column_names = args.first.dup allow_extra_hash_keys = true else array_of_hashes = args.first column_names = array_of_hashes.first.keys allow_extra_hash_keys = false end symbolized_column_names = column_names.map(&:to_sym) unless symbolized_column_names.include?(symbolized_foreign_key) column_names << symbolized_foreign_key end if reflection.type && !symbolized_column_names.include?(reflection.type.to_sym) column_names << reflection.type.to_sym end array_of_attributes = array_of_hashes.map do |h| error_message = model_klass.send(:validate_hash_import, h, symbolized_column_names, allow_extra_hash_keys) raise ArgumentError, error_message if error_message column_names.map do |key| if key == symbolized_foreign_key owner_primary_key_value elsif reflection.type && key == reflection.type.to_sym owner.class.name else h[key] end end end return model_klass.bulk_import column_names, array_of_attributes, options # supports empty array elsif args.last.is_a?( Array ) && args.last.empty? return ActiveRecord::Import::Result.new([], 0, []) # supports 2-element array and array elsif args.size == 2 && args.first.is_a?( Array ) && args.last.is_a?( Array ) column_names, array_of_attributes = args # dup the passed args so we don't modify unintentionally column_names = column_names.dup array_of_attributes = array_of_attributes.map(&:dup) symbolized_column_names = column_names.map(&:to_sym) if symbolized_column_names.include?(symbolized_foreign_key) index = symbolized_column_names.index(symbolized_foreign_key) array_of_attributes.each { |attrs| attrs[index] = owner_primary_key_value } else column_names << symbolized_foreign_key array_of_attributes.each { |attrs| attrs << owner_primary_key_value } end if reflection.type symbolized_type = reflection.type.to_sym if symbolized_column_names.include?(symbolized_type) index = symbolized_column_names.index(symbolized_type) array_of_attributes.each { |attrs| attrs[index] = owner.class.name } else column_names << symbolized_type array_of_attributes.each { |attrs| attrs << owner.class.name } end end return model_klass.bulk_import column_names, array_of_attributes, options else raise ArgumentError, "Invalid arguments!" end end alias import bulk_import unless respond_to? 
:import end class ActiveRecord::Base class << self def establish_connection_with_activerecord_import(*args) conn = establish_connection_without_activerecord_import(*args) ActiveRecord::Import.load_from_connection_pool connection_pool conn end alias establish_connection_without_activerecord_import establish_connection alias establish_connection establish_connection_with_activerecord_import # Returns true if the current database connection adapter # supports import functionality, otherwise returns false. def supports_import?(*args) connection.respond_to?(:supports_import?) && connection.supports_import?(*args) end # Returns true if the current database connection adapter # supports on duplicate key update functionality, otherwise # returns false. def supports_on_duplicate_key_update? connection.respond_to?(:supports_on_duplicate_key_update?) && connection.supports_on_duplicate_key_update? end # returns true if the current database connection adapter # supports setting the primary key of bulk imported models, otherwise # returns false def supports_setting_primary_key_of_imported_objects? connection.respond_to?(:supports_setting_primary_key_of_imported_objects?) && connection.supports_setting_primary_key_of_imported_objects? end # Imports a collection of values to the database. # # This is more efficient than using ActiveRecord::Base#create or # ActiveRecord::Base#save multiple times. This method works well if # you want to create more than one record at a time and do not care # about having ActiveRecord objects returned for each record # inserted. # # This can be used with or without validations. It does not utilize # the ActiveRecord::Callbacks during creation/modification while # performing the import. # # == Usage # Model.import array_of_models # Model.import column_names, array_of_models # Model.import array_of_hash_objects # Model.import column_names, array_of_hash_objects # Model.import column_names, array_of_values # Model.import column_names, array_of_values, options # # ==== Model.import array_of_models # # With this form you can call _import_ passing in an array of model # objects that you want updated. # # ==== Model.import column_names, array_of_values # # The first parameter +column_names+ is an array of symbols or # strings which specify the columns that you want to update. # # The second parameter, +array_of_values+, is an array of # arrays. Each subarray is a single set of values for a new # record. The order of values in each subarray should match up to # the order of the +column_names+. # # ==== Model.import column_names, array_of_values, options # # The first two parameters are the same as the above form. The third # parameter, +options+, is a hash. This is optional. Please see # below for what +options+ are available. # # == Options # * +validate+ - true|false, tells import whether or not to use # ActiveRecord validations. Validations are enforced by default. # It skips the uniqueness validation for performance reasons. # You can find more details here: # https://github.com/zdennis/activerecord-import/issues/228 # * +ignore+ - true|false, an alias for on_duplicate_key_ignore. # * +on_duplicate_key_ignore+ - true|false, tells import to discard # records that contain duplicate keys. For Postgres 9.5+ it adds # ON CONFLICT DO NOTHING, for MySQL it uses INSERT IGNORE, and for # SQLite it uses INSERT OR IGNORE. Cannot be enabled on a # recursive import. 
For database adapters that normally support # setting primary keys on imported objects, this option prevents # that from occurring. # * +on_duplicate_key_update+ - :all, an Array, or Hash, tells import to # use MySQL's ON DUPLICATE KEY UPDATE or Postgres/SQLite ON CONFLICT # DO UPDATE ability. See On Duplicate Key Update below. # * +synchronize+ - an array of ActiveRecord instances for the model # that you are currently importing data into. This synchronizes # existing model instances in memory with updates from the import. # * +timestamps+ - true|false, tells import to not add timestamps # (if false) even if record timestamps is disabled in ActiveRecord::Base # * +recursive+ - true|false, tells import to import all has_many/has_one # associations if the adapter supports setting the primary keys of the # newly imported objects. PostgreSQL only. # * +batch_size+ - an integer value to specify the max number of records to # include per insert. Defaults to the total number of records to import. # # == Examples # class BlogPost < ActiveRecord::Base ; end # # # Example using array of model objects # posts = [ BlogPost.new author_name: 'Zach Dennis', title: 'AREXT', # BlogPost.new author_name: 'Zach Dennis', title: 'AREXT2', # BlogPost.new author_name: 'Zach Dennis', title: 'AREXT3' ] # BlogPost.import posts # # # Example using array_of_hash_objects # # NOTE: column_names will be determined by using the keys of the first hash in the array. If later hashes in the # # array have different keys an exception will be raised. If you have hashes to import with different sets of keys # # we recommend grouping these into batches before importing. # values = [ {author_name: 'zdennis', title: 'test post'}, {author_name: 'jdoe', title: 'another test post'} ] # BlogPost.import values # # # Example using column_names and array_of_hash_objects # columns = [ :author_name, :title ] # values = [ {author_name: 'zdennis', title: 'test post'}, {author_name: 'jdoe', title: 'another test post'} ] # BlogPost.import columns, values # # # Example using column_names and array_of_values # columns = [ :author_name, :title ] # values = [ [ 'zdennis', 'test post' ], [ 'jdoe', 'another test post' ] ] # BlogPost.import columns, values # # # Example using column_names, array_of_values and options # columns = [ :author_name, :title ] # values = [ [ 'zdennis', 'test post' ], [ 'jdoe', 'another test post' ] ] # BlogPost.import( columns, values, validate: false ) # # # Example synchronizing existing instances in memory # post = BlogPost.where(author_name: 'zdennis').first # puts post.author_name # => 'zdennis' # columns = [ :author_name, :title ] # values = [ [ 'yoda', 'test post' ] ] # BlogPost.import columns, values, synchronize: [ post ] # puts post.author_name # => 'yoda' # # # Example synchronizing unsaved/new instances in memory by using a unique imported field # posts = [BlogPost.new(title: "Foo"), BlogPost.new(title: "Bar")] # BlogPost.import posts, synchronize: posts, synchronize_keys: [:title] # puts posts.first.persisted? # => true # # == On Duplicate Key Update (MySQL) # # The :on_duplicate_key_update option can be either :all, an Array, or a Hash. # # ==== Using :all # # The :on_duplicate_key_update option can be set to :all. All columns # other than the primary key are updated. If a list of column names is # supplied, only those columns will be updated.
Below is an example: # # BlogPost.import columns, values, on_duplicate_key_update: :all # # ==== Using an Array # # The :on_duplicate_key_update option can be an array of column # names. The column names are the only fields that are updated if # a duplicate record is found. Below is an example: # # BlogPost.import columns, values, on_duplicate_key_update: [ :date_modified, :content, :author ] # # ==== Using A Hash # # The :on_duplicate_key_update option can be a hash of column names # to model attribute name mappings. This gives you finer grained # control over what fields are updated with what attributes on your # model. Below is an example: # # BlogPost.import columns, attributes, on_duplicate_key_update: { title: :title } # # == On Duplicate Key Update (Postgres 9.5+ and SQLite 3.24+) # # The :on_duplicate_key_update option can be :all, an Array, or a Hash with up to # three attributes, :conflict_target (and optionally :index_predicate) or # :constraint_name (Postgres), and :columns. # # ==== Using :all # # The :on_duplicate_key_update option can be set to :all. All columns # other than the primary key are updated. If a list of column names is # supplied, only those columns will be updated. Below is an example: # # BlogPost.import columns, values, on_duplicate_key_update: :all # # ==== Using an Array # # The :on_duplicate_key_update option can be an array of column # names. This option only handles inserts that conflict with the # primary key. If a table does not have a primary key, this will # not work. The column names are the only fields that are updated # if a duplicate record is found. Below is an example: # # BlogPost.import columns, values, on_duplicate_key_update: [ :date_modified, :content, :author ] # # ==== Using a Hash # # The :on_duplicate_key_update option can be a hash with up to three # attributes, :conflict_target (and optionally :index_predicate) or # :constraint_name, and :columns. Unlike MySQL, Postgres requires the # conflicting constraint to be explicitly specified. Using this option # allows you to specify a constraint other than the primary key. # # ===== :conflict_target # # The :conflict_target attribute specifies the columns that make up the # conflicting unique constraint and can be a single column or an array of # column names. This attribute is ignored if :constraint_name is included, # but it is the preferred method of identifying a constraint. It will # default to the primary key. Below is an example: # # BlogPost.import columns, values, on_duplicate_key_update: { conflict_target: [ :author_id, :slug ], columns: [ :date_modified ] } # # ===== :index_predicate # # The :index_predicate attribute optionally specifies a WHERE condition # on :conflict_target, which is required for matching against partial # indexes. This attribute is ignored if :constraint_name is included. # Below is an example: # # BlogPost.import columns, values, on_duplicate_key_update: { conflict_target: [ :author_id, :slug ], index_predicate: 'status <> 0', columns: [ :date_modified ] } # # ===== :constraint_name # # The :constraint_name attribute explicitly identifies the conflicting # unique index by name. Postgres documentation discourages using this method # of identifying an index unless absolutely necessary. Below is an example: # # BlogPost.import columns, values, on_duplicate_key_update: { constraint_name: :blog_posts_pkey, columns: [ :date_modified ] } # # ===== :condition # # The :condition attribute optionally specifies a WHERE condition # on :conflict_action. 
Only rows for which this expression returns true will be updated. # Note that it's evaluated last, after a conflict has been identified as a candidate to update. # Below is an example: # # BlogPost.import columns, values, on_duplicate_key_update: { conflict_target: [ :author_id ], condition: "blog_posts.title NOT LIKE '%sample%'", columns: [ :author_name ] } # # ===== :columns # # The :columns attribute can be either :all, an Array, or a Hash. # # ===== Using :all # # The :columns attribute can be :all. All columns other than the primary key will be updated. # If a list of column names is supplied, only those columns will be updated. # Below is an example: # # BlogPost.import columns, values, on_duplicate_key_update: { conflict_target: :slug, columns: :all } # # ===== Using an Array # # The :columns attribute can be an array of column names. The column names # are the only fields that are updated if a duplicate record is found. # Below is an example: # # BlogPost.import columns, values, on_duplicate_key_update: { conflict_target: :slug, columns: [ :date_modified, :content, :author ] } # # ===== Using a Hash # # The :columns option can be a hash of column names to model attribute name # mappings. This gives you finer grained control over what fields are updated # with what attributes on your model. Below is an example: # # BlogPost.import columns, attributes, on_duplicate_key_update: { conflict_target: :slug, columns: { title: :title } } # # = Returns # This returns an object which responds to +failed_instances+, +num_inserts+, +ids+, and +results+. # * failed_instances - an array of objects that failed validation and were not committed to the database. An empty array if no validation is performed. # * num_inserts - the number of insert statements it took to import the data # * ids - the primary keys of the imported records if the adapter supports it, otherwise an empty array. # * results - import results if the adapter supports it, otherwise an empty array. def bulk_import(*args) if args.first.is_a?( Array ) && args.first.first.is_a?(ActiveRecord::Base) options = {} options.merge!( args.pop ) if args.last.is_a?(Hash) models = args.first import_helper(models, options) else import_helper(*args) end end alias import bulk_import unless respond_to? :import # Imports a collection of values if all values are valid. Import fails at the # first encountered validation error and raises ActiveRecord::RecordInvalid # with the failed instance. def bulk_import!(*args) options = args.last.is_a?( Hash ) ? args.pop : {} options[:validate] = true options[:raise_error] = true bulk_import(*args, options) end alias import! bulk_import! unless respond_to? :import! def import_helper( *args ) options = { validate: true, timestamps: true } options.merge!( args.pop ) if args.last.is_a? Hash # making sure that current model's primary key is used options[:primary_key] = primary_key options[:locking_column] = locking_column if attribute_names.include?(locking_column) is_validating = options[:validate_with_context].present? ? true : options[:validate] validator = ActiveRecord::Import::Validator.new(self, options) # assume array of model objects if args.last.is_a?( Array ) && args.last.first.is_a?(ActiveRecord::Base) if args.length == 2 models = args.last column_names = args.first.dup else models = args.first column_names = if connection.respond_to?(:supports_virtual_columns?) && connection.supports_virtual_columns? columns.reject(&:virtual?).map(&:name) else self.column_names.dup end end if models.first.id.nil?
Array(primary_key).each do |c| if column_names.include?(c) && columns_hash[c].type == :uuid column_names.delete(c) end end end default_values = column_defaults stored_attrs = respond_to?(:stored_attributes) ? stored_attributes : {} serialized_attrs = if defined?(ActiveRecord::Type::Serialized) attrs = column_names.select { |c| type_for_attribute(c.to_s).class == ActiveRecord::Type::Serialized } Hash[attrs.map { |a| [a, nil] }] else serialized_attributes end update_attrs = if record_timestamps && options[:timestamps] if respond_to?(:timestamp_attributes_for_update, true) send(:timestamp_attributes_for_update).map(&:to_sym) else new.send(:timestamp_attributes_for_update_in_model) end end array_of_attributes = [] models.each do |model| if supports_setting_primary_key_of_imported_objects? load_association_ids(model) end if is_validating && !validator.valid_model?(model) raise(ActiveRecord::RecordInvalid, model) if options[:raise_error] next end array_of_attributes << column_names.map do |name| if model.persisted? && update_attrs && update_attrs.include?(name.to_sym) && !model.send("#{name}_changed?") nil elsif stored_attrs.key?(name.to_sym) || serialized_attrs.key?(name.to_s) || default_values[name.to_s] model.read_attribute(name.to_s) else model.read_attribute_before_type_cast(name.to_s) end end end # supports array of hash objects elsif args.last.is_a?( Array ) && args.last.first.is_a?(Hash) if args.length == 2 array_of_hashes = args.last column_names = args.first.dup allow_extra_hash_keys = true else array_of_hashes = args.first column_names = array_of_hashes.first.keys allow_extra_hash_keys = false end array_of_attributes = array_of_hashes.map do |h| error_message = validate_hash_import(h, column_names, allow_extra_hash_keys) raise ArgumentError, error_message if error_message column_names.map do |key| h[key] end end # supports empty array elsif args.last.is_a?( Array ) && args.last.empty? return ActiveRecord::Import::Result.new([], 0, []) # supports 2-element array and array elsif args.size == 2 && args.first.is_a?( Array ) && args.last.is_a?( Array ) unless args.last.first.is_a?(Array) raise ArgumentError, "Last argument should be a two dimensional array '[[]]'. First element in array was a #{args.last.first.class}" end column_names, array_of_attributes = args # dup the passed args so we don't modify unintentionally column_names = column_names.dup array_of_attributes = array_of_attributes.map(&:dup) else raise ArgumentError, "Invalid arguments!" end # Force the primary key col into the insert if it's not # on the list and we are using a sequence and stuff a nil # value for it into each row so the sequencer will fire later symbolized_column_names = Array(column_names).map(&:to_sym) symbolized_primary_key = Array(primary_key).map(&:to_sym) if !symbolized_primary_key.to_set.subset?(symbolized_column_names.to_set) && connection.prefetch_primary_key? && sequence_name column_count = column_names.size column_names.concat(Array(primary_key)).uniq! columns_added = column_names.size - column_count new_fields = Array.new(columns_added) array_of_attributes.each { |a| a.concat(new_fields) } end # Don't modify incoming arguments on_duplicate_key_update = options[:on_duplicate_key_update] if on_duplicate_key_update updatable_columns = symbolized_column_names.reject { |c| symbolized_primary_key.include? 
c } options[:on_duplicate_key_update] = if on_duplicate_key_update.is_a?(Hash) on_duplicate_key_update.each_with_object({}) do |(k, v), duped_options| duped_options[k] = if k == :columns && v == :all updatable_columns elsif v.duplicable? v.dup else v end end elsif on_duplicate_key_update == :all updatable_columns elsif on_duplicate_key_update.duplicable? on_duplicate_key_update.dup else on_duplicate_key_update end end timestamps = {} # record timestamps unless disabled in ActiveRecord::Base if record_timestamps && options[:timestamps] timestamps = add_special_rails_stamps column_names, array_of_attributes, options end return_obj = if is_validating import_with_validations( column_names, array_of_attributes, options ) do |failed_instances| if models models.each { |m| failed_instances << m if m.errors.any? } else # create instances for each of our column/value sets arr = validations_array_for_column_names_and_attributes( column_names, array_of_attributes ) # keep track of the instance and the position it is currently at. if this fails # validation we'll use the index to remove it from the array_of_attributes arr.each_with_index do |hsh, i| model = new hsh.each_pair { |k, v| model[k] = v } next if validator.valid_model?(model) raise(ActiveRecord::RecordInvalid, model) if options[:raise_error] array_of_attributes[i] = nil failure = model.dup failure.errors.send(:initialize_dup, model.errors) failed_instances << failure end array_of_attributes.compact! end end else import_without_validations_or_callbacks( column_names, array_of_attributes, options ) end if options[:synchronize] sync_keys = options[:synchronize_keys] || Array(primary_key) synchronize( options[:synchronize], sync_keys) end return_obj.num_inserts = 0 if return_obj.num_inserts.nil? # if we have ids, then set the id on the models and mark the models as clean. if models && supports_setting_primary_key_of_imported_objects? set_attributes_and_mark_clean(models, return_obj, timestamps, options) # if there are auto-save associations on the models we imported that are new, import them as well import_associations(models, options.dup) if options[:recursive] end return_obj end # Imports the passed in +column_names+ and +array_of_attributes+ # given the passed in +options+ Hash with validations. Returns an # object with the methods +failed_instances+ and +num_inserts+. # +failed_instances+ is an array of instances that failed validations. # +num_inserts+ is the number of inserts it took to import the data. See # ActiveRecord::Base.import for more information on # +column_names+, +array_of_attributes+ and # +options+. 
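# An illustrative call (a sketch, assuming a books table with title and
# author columns):
#
#   result = Book.import_without_validations_or_callbacks(
#     [ :title, :author ],
#     [ [ 'Book1', 'FooManChu' ], [ 'Book2', 'Bob Jones' ] ]
#   )
#   result.num_inserts # => 1 on adapters that support multi-row inserts
#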
def import_without_validations_or_callbacks( column_names, array_of_attributes, options = {} ) return ActiveRecord::Import::Result.new([], 0, [], []) if array_of_attributes.empty? column_names = column_names.map(&:to_sym) scope_columns, scope_values = scope_attributes.to_a.transpose unless scope_columns.blank? scope_columns.zip(scope_values).each do |name, value| name_as_sym = name.to_sym next if column_names.include?(name_as_sym) is_sti = (name_as_sym == inheritance_column.to_sym && self < base_class) value = Array(value).first if is_sti column_names << name_as_sym array_of_attributes.each { |attrs| attrs << value } end end columns = column_names.each_with_index.map do |name, i| column = columns_hash[name.to_s] raise ActiveRecord::Import::MissingColumnError.new(name.to_s, i) if column.nil? column end columns_sql = "(#{column_names.map { |name| connection.quote_column_name(name) }.join(',')})" pre_sql_statements = connection.pre_sql_statements( options ) insert_sql = ['INSERT', pre_sql_statements, "INTO #{quoted_table_name} #{columns_sql} VALUES "] insert_sql = insert_sql.flatten.join(' ') values_sql = values_sql_for_columns_and_attributes(columns, array_of_attributes) number_inserted = 0 ids = [] results = [] if supports_import? # generate the sql post_sql_statements = connection.post_sql_statements( quoted_table_name, options ) batch_size = options[:batch_size] || values_sql.size values_sql.each_slice(batch_size) do |batch_values| # perform the inserts result = connection.insert_many( [insert_sql, post_sql_statements].flatten, batch_values, options, "#{model_name} Create Many Without Validations Or Callbacks" ) number_inserted += result.num_inserts ids += result.ids results += result.results end else transaction(requires_new: true) do values_sql.each do |values| ids << connection.insert(insert_sql + values) number_inserted += 1 end end end ActiveRecord::Import::Result.new([], number_inserted, ids, results) end private def set_attributes_and_mark_clean(models, import_result, timestamps, options) return if models.nil? models -= import_result.failed_instances # if ids were returned for all models we know all were updated if models.size == import_result.ids.size import_result.ids.each_with_index do |id, index| model = models[index] model.id = id timestamps.each do |attr, value| model.send(attr + "=", value) end end end if models.size == import_result.results.size columns = Array(options[:returning]) single_column = "#{columns.first}=" if columns.size == 1 import_result.results.each_with_index do |result, index| model = models[index] if single_column model.send(single_column, result) else columns.each_with_index do |column, col_index| model.send("#{column}=", result[col_index]) end end end end models.each do |model| if model.respond_to?(:clear_changes_information) # Rails 4.0 and higher model.clear_changes_information else # Rails 3.2 model.instance_variable_get(:@changed_attributes).clear end model.instance_variable_set(:@new_record, false) end end # Sync belongs_to association ids with foreign key field def load_association_ids(model) association_reflections = model.class.reflect_on_all_associations(:belongs_to) association_reflections.each do |association_reflection| column_name = association_reflection.foreign_key next if association_reflection.options[:polymorphic] association = model.association(association_reflection.name) association = association.target next if association.blank? || model.public_send(column_name).present? 
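# Copy the loaded association's primary key value into the model's foreign key column so the imported row references the existing record.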
association_primary_key = association_reflection.association_primary_key model.public_send("#{column_name}=", association.send(association_primary_key)) end end def import_associations(models, options) # now, for all the dirty associations, collect them into a new set of models, then recurse. # notes: # does not handle associations that reference themselves # should probably take a hash of associations to follow. return if models.nil? associated_objects_by_class = {} models.each { |model| find_associated_objects_for_import(associated_objects_by_class, model) } # :on_duplicate_key_update not supported for associations options.delete(:on_duplicate_key_update) associated_objects_by_class.each_value do |associations| associations.each_value do |associated_records| associated_records.first.class.bulk_import(associated_records, options) unless associated_records.empty? end end end # We are eventually going to call Class.import so we build up a hash # of class => objects to import. def find_associated_objects_for_import(associated_objects_by_class, model) associated_objects_by_class[model.class.name] ||= {} return associated_objects_by_class unless model.id association_reflections = model.class.reflect_on_all_associations(:has_one) + model.class.reflect_on_all_associations(:has_many) association_reflections.each do |association_reflection| associated_objects_by_class[model.class.name][association_reflection.name] ||= [] association = model.association(association_reflection.name) association.loaded! # Wrap target in an array if not already association = Array(association.target) changed_objects = association.select { |a| a.new_record? || a.changed? } changed_objects.each do |child| child.public_send("#{association_reflection.foreign_key}=", model.id) # For polymorphic associations association_reflection.type.try do |type| child.public_send("#{type}=", model.class.base_class.name) end end associated_objects_by_class[model.class.name][association_reflection.name].concat changed_objects end associated_objects_by_class end # Returns the SQL VALUES clause for an INSERT statement given the passed in +columns+ # and +array_of_attributes+. def values_sql_for_columns_and_attributes(columns, array_of_attributes) # :nodoc: # connection gets called a *lot* in this high intensity loop. # Reuse the same one w/in the loop, otherwise it would keep being re-retrieved (= lots of time for large imports) connection_memo = connection array_of_attributes.map do |arr| my_values = arr.each_with_index.map do |val, j| column = columns[j] # be sure to query sequence_name *last*, only if cheaper tests fail, because it's costly if val.nil? && Array(primary_key).first == column.name && !sequence_name.blank? connection_memo.next_value_for_sequence(sequence_name) elsif val.respond_to?(:to_sql) "(#{val.to_sql})" elsif column if respond_to?(:type_caster) # Rails 5.0 and higher type = type_for_attribute(column.name) val = type.type == :boolean ? type.cast(val) : type.serialize(val) connection_memo.quote(val) elsif column.respond_to?(:type_cast_from_user) # Rails 4.2 connection_memo.quote(column.type_cast_from_user(val), column) else # Rails 3.2, 4.0 and 4.1 if serialized_attributes.include?(column.name) val = serialized_attributes[column.name].dump(val) end # Fixes #443 to support binary (i.e. 
bytea) columns on PG val = column.type_cast(val) unless column.type.to_sym == :binary connection_memo.quote(val, column) end end end "(#{my_values.join(',')})" end end def add_special_rails_stamps( column_names, array_of_attributes, options ) timestamp_columns = {} timestamps = {} if respond_to?(:all_timestamp_attributes_in_model, true) # Rails 5.1 and higher timestamp_columns[:create] = timestamp_attributes_for_create_in_model timestamp_columns[:update] = timestamp_attributes_for_update_in_model else instance = new timestamp_columns[:create] = instance.send(:timestamp_attributes_for_create_in_model) timestamp_columns[:update] = instance.send(:timestamp_attributes_for_update_in_model) end # use tz as set in ActiveRecord::Base timestamp = ActiveRecord::Base.default_timezone == :utc ? Time.now.utc : Time.now [:create, :update].each do |action| timestamp_columns[action].each do |column| column = column.to_s timestamps[column] = timestamp index = column_names.index(column) || column_names.index(column.to_sym) if index # replace every instance of the array of attributes with our value array_of_attributes.each { |arr| arr[index] = timestamp if arr[index].nil? } else column_names << column array_of_attributes.each { |arr| arr << timestamp } end if supports_on_duplicate_key_update? && action == :update connection.add_column_for_on_duplicate_key_update(column, options) end end end timestamps end # Returns an Array of Hashes for the passed in +column_names+ and +array_of_attributes+. def validations_array_for_column_names_and_attributes( column_names, array_of_attributes ) # :nodoc: array_of_attributes.map { |values| Hash[column_names.zip(values)] } end # Checks that the imported hash has the required_keys, optionally also checks that the hash has # no keys beyond those required when `allow_extra_keys` is false. # returns `nil` if validation passes, or an error message if it fails def validate_hash_import(hash, required_keys, allow_extra_keys) # :nodoc: extra_keys = allow_extra_keys ? [] : hash.keys - required_keys missing_keys = required_keys - hash.keys return nil if extra_keys.empty? && missing_keys.empty? if allow_extra_keys <<-EOS Hash key mismatch. When importing an array of hashes with provided column_names, each hash must contain keys for all column_names. Required keys: #{required_keys} Missing keys: #{missing_keys} Hash: #{hash} EOS else <<-EOS Hash key mismatch. When importing an array of hashes, all hashes must have the same keys. If you have records that are missing some values, we recommend you either set default values for the missing keys or group these records into batches by key set before importing. Required keys: #{required_keys} Extra keys: #{extra_keys} Missing keys: #{missing_keys} Hash: #{hash} EOS end end end end activerecord-import-0.28.2/lib/activerecord-import/synchronize.rb0000644000004100000410000000535313424140671025255 0ustar www-datawww-datamodule ActiveRecord # :nodoc: class Base # :nodoc: # Synchronizes the passed in ActiveRecord instances with data # from the database. This is like calling reload on an individual # ActiveRecord instance but it is intended for use on multiple instances. # # This uses one query for all instance updates and then updates existing # instances rather than sending one query for each instance # # == Examples # # Synchronizing existing models by matching on the primary key field # posts = Post.where(author: "Zach").to_a # <.. 
out of system changes occur to change author name from Zach to Zachary..> # Post.synchronize posts # posts.first.author # => "Zachary" instead of Zach # # # Synchronizing using custom key fields # posts = Post.where(author: "Zach").to_a # <.. out of system changes occur to change the address of author 'Zach' to 1245 Foo Ln ..> # Post.synchronize posts, [:name] # queries on the :name column and not the :id column # posts.first.address # => "1245 Foo Ln" instead of whatever it was # def self.synchronize(instances, keys = [primary_key]) return if instances.empty? conditions = {} key_values = keys.map { |key| instances.map(&key.to_sym) } keys.zip(key_values).each { |key, values| conditions[key] = values } order = keys.map { |key| "#{key} ASC" }.join(",") klass = instances.first.class fresh_instances = klass.unscoped.where(conditions).order(order) instances.each do |instance| matched_instance = fresh_instances.detect do |fresh_instance| keys.all? { |key| fresh_instance.send(key) == instance.send(key) } end next unless matched_instance instance.send :clear_aggregation_cache instance.send :clear_association_cache instance.instance_variable_set :@attributes, matched_instance.instance_variable_get(:@attributes) if instance.respond_to?(:clear_changes_information) instance.clear_changes_information # Rails 4.2 and higher else instance.instance_variable_set :@attributes_cache, {} # Rails 4.0, 4.1 instance.changed_attributes.clear # Rails 3.2 instance.previous_changes.clear end # Since the instance now accurately reflects the record in # the database, ensure that instance.persisted? is true. instance.instance_variable_set '@new_record', false instance.instance_variable_set '@destroyed', false end end # See ActiveRecord::ConnectionAdapters::AbstractAdapter.synchronize def synchronize(instances, key = [ActiveRecord::Base.primary_key]) self.class.synchronize(instances, key) end end end activerecord-import-0.28.2/lib/activerecord-import/active_record/0000755000004100000410000000000013424140671025160 5ustar www-datawww-dataactiverecord-import-0.28.2/lib/activerecord-import/active_record/adapters/0000755000004100000410000000000013424140671026763 5ustar www-datawww-dataactiverecord-import-0.28.2/lib/activerecord-import/active_record/adapters/abstract_adapter.rb0000644000004100000410000000037013424140671032613 0ustar www-datawww-datarequire "activerecord-import/adapters/abstract_adapter" module ActiveRecord # :nodoc: module ConnectionAdapters # :nodoc: class AbstractAdapter # :nodoc: include ActiveRecord::Import::AbstractAdapter::InstanceMethods end end end activerecord-import-0.28.2/lib/activerecord-import/active_record/adapters/jdbcsqlite3_adapter.rb0000644000004100000410000000033613424140671033221 0ustar www-datawww-datarequire "active_record/connection_adapters/sqlite3_adapter" require "activerecord-import/adapters/sqlite3_adapter" class ActiveRecord::ConnectionAdapters::SQLite3Adapter include ActiveRecord::Import::SQLite3Adapter end ././@LongLink0000644000000000000000000000015400000000000011603 Lustar rootrootactiverecord-import-0.28.2/lib/activerecord-import/active_record/adapters/seamless_database_pool_adapter.rbactiverecord-import-0.28.2/lib/activerecord-import/active_record/adapters/seamless_database_pool_ada0000644000004100000410000000042713424140671034207 0ustar www-datawww-datarequire "seamless_database_pool" require "active_record/connection_adapters/seamless_database_pool_adapter" require "activerecord-import/adapters/mysql_adapter" class 
ActiveRecord::ConnectionAdapters::SeamlessDatabasePoolAdapter include ActiveRecord::Import::MysqlAdapter end activerecord-import-0.28.2/lib/activerecord-import/active_record/adapters/mysql2_adapter.rb0000644000004100000410000000033213424140671032235 0ustar www-datawww-datarequire "active_record/connection_adapters/mysql2_adapter" require "activerecord-import/adapters/mysql2_adapter" class ActiveRecord::ConnectionAdapters::Mysql2Adapter include ActiveRecord::Import::Mysql2Adapter end activerecord-import-0.28.2/lib/activerecord-import/active_record/adapters/jdbcmysql_adapter.rb0000644000004100000410000000032613424140671033001 0ustar www-datawww-datarequire "active_record/connection_adapters/mysql_adapter" require "activerecord-import/adapters/mysql_adapter" class ActiveRecord::ConnectionAdapters::MysqlAdapter include ActiveRecord::Import::MysqlAdapter end activerecord-import-0.28.2/lib/activerecord-import/active_record/adapters/jdbcpostgresql_adapter.rb0000644000004100000410000000035213424140671034036 0ustar www-datawww-datarequire "active_record/connection_adapters/postgresql_adapter" require "activerecord-import/adapters/postgresql_adapter" class ActiveRecord::ConnectionAdapters::PostgreSQLAdapter include ActiveRecord::Import::PostgreSQLAdapter end activerecord-import-0.28.2/lib/activerecord-import/active_record/adapters/sqlite3_adapter.rb0000644000004100000410000000033613424140671032376 0ustar www-datawww-datarequire "active_record/connection_adapters/sqlite3_adapter" require "activerecord-import/adapters/sqlite3_adapter" class ActiveRecord::ConnectionAdapters::SQLite3Adapter include ActiveRecord::Import::SQLite3Adapter end activerecord-import-0.28.2/lib/activerecord-import/active_record/adapters/postgresql_adapter.rb0000644000004100000410000000035213424140671033213 0ustar www-datawww-datarequire "active_record/connection_adapters/postgresql_adapter" require "activerecord-import/adapters/postgresql_adapter" class ActiveRecord::ConnectionAdapters::PostgreSQLAdapter include ActiveRecord::Import::PostgreSQLAdapter end activerecord-import-0.28.2/lib/activerecord-import/mysql2.rb0000644000004100000410000000046213424140671024125 0ustar www-datawww-datawarn <<-MSG [DEPRECATION] loading activerecord-import via 'require "activerecord-import/"' is deprecated. Update to autorequire using 'require "activerecord-import"'. See http://github.com/zdennis/activerecord-import/wiki/Requiring for more information MSG require "activerecord-import" activerecord-import-0.28.2/lib/activerecord-import/postgresql.rb0000644000004100000410000000046213424140671025101 0ustar www-datawww-datawarn <<-MSG [DEPRECATION] loading activerecord-import via 'require "activerecord-import/"' is deprecated. Update to autorequire using 'require "activerecord-import"'. 
See http://github.com/zdennis/activerecord-import/wiki/Requiring for more information MSG require "activerecord-import" activerecord-import-0.28.2/lib/activerecord-import/value_sets_parser.rb0000644000004100000410000000365013424140671026426 0ustar www-datawww-datamodule ActiveRecord::Import class ValueSetTooLargeError < StandardError attr_reader :size def initialize(msg = "Value set exceeds max size", size = 0) @size = size super(msg) end end class ValueSetsBytesParser attr_reader :reserved_bytes, :max_bytes, :values def self.parse(values, options) new(values, options).parse end def initialize(values, options) @values = values @reserved_bytes = options[:reserved_bytes] || 0 @max_bytes = options.fetch(:max_bytes) { default_max_bytes } end def parse value_sets = [] arr = [] current_size = 0 values.each_with_index do |val, i| comma_bytes = arr.size insert_size = reserved_bytes + val.bytesize if insert_size > max_bytes raise ValueSetTooLargeError.new("#{insert_size} bytes exceeds the max allowed for an insert [#{@max_bytes}]", insert_size) end bytes_thus_far = reserved_bytes + current_size + val.bytesize + comma_bytes if bytes_thus_far <= max_bytes current_size += val.bytesize arr << val else value_sets << arr arr = [val] current_size = val.bytesize end # if we're on the last iteration push whatever we have in arr to value_sets value_sets << arr if i == (values.size - 1) end value_sets end private def default_max_bytes values_in_bytes = values.sum(&:bytesize) comma_separated_bytes = values.size - 1 reserved_bytes + values_in_bytes + comma_separated_bytes end end class ValueSetsRecordsParser attr_reader :max_records, :values def self.parse(values, options) new(values, options).parse end def initialize(values, options) @values = values @max_records = options[:max_records] end def parse @values.in_groups_of(max_records, false) end end end activerecord-import-0.28.2/lib/activerecord-import/sqlite3.rb0000644000004100000410000000046213424140671024262 0ustar www-datawww-datawarn <<-MSG [DEPRECATION] loading activerecord-import via 'require "activerecord-import/"' is deprecated. Update to autorequire using 'require "activerecord-import"'. See http://github.com/zdennis/activerecord-import/wiki/Requiring for more information MSG require "activerecord-import" activerecord-import-0.28.2/lib/activerecord-import.rb0000644000004100000410000000023513424140671022674 0ustar www-datawww-data# rubocop:disable Style/FileName require "active_support/lazy_load_hooks" ActiveSupport.on_load(:active_record) do require "activerecord-import/base" end activerecord-import-0.28.2/Brewfile0000644000004100000410000000005413424140671017300 0ustar www-datawww-databrew "mysql" brew "postgresql" brew "sqlite"activerecord-import-0.28.2/README.markdown0000644000004100000410000005700613424140671020330 0ustar www-datawww-data# activerecord-import [![Build Status](https://travis-ci.org/zdennis/activerecord-import.svg?branch=master)](https://travis-ci.org/zdennis/activerecord-import) activerecord-import is a library for bulk inserting data using ActiveRecord. One of its major features is following activerecord associations and generating the minimal number of SQL insert statements required, avoiding the N+1 insert problem. An example probably explains it best. Say you had a schema like this: - Publishers have Books - Books have Reviews and you wanted to bulk insert 100 new publishers with 10K books and 3 reviews per book. 
This library will follow the associations down and generate only 3 SQL insert statements - one for the publishers, one for the books, and one for the reviews. In contrast, the standard ActiveRecord save would generate 100 insert statements for the publishers, then it would visit each publisher and save all the books: 100 * 10,000 = 1,000,000 SQL insert statements and then the reviews: 100 * 10,000 * 3 = 3M SQL insert statements. That would be about 4M SQL insert statements vs 3, which results in vastly improved performance. In our case, it converted an 18 hour batch process to <2 hrs. The gem provides the following high-level features: * activerecord-import can work with raw columns and arrays of values (fastest) * activerecord-import works with model objects (faster) * activerecord-import can perform validations (fast) * activerecord-import can perform on duplicate key updates (requires MySQL, PostgreSQL 9.5+, or SQLite 3.24+) ## Table of Contents * [Examples](#examples) * [Introduction](#introduction) * [Columns and Arrays](#columns-and-arrays) * [Hashes](#hashes) * [ActiveRecord Models](#activerecord-models) * [Batching](#batching) * [Recursive](#recursive) * [Options](#options) * [Duplicate Key Ignore](#duplicate-key-ignore) * [Duplicate Key Update](#duplicate-key-update) * [Return Info](#return-info) * [Counter Cache](#counter-cache) * [ActiveRecord Timestamps](#activerecord-timestamps) * [Callbacks](#callbacks) * [Supported Adapters](#supported-adapters) * [Additional Adapters](#additional-adapters) * [Requiring](#requiring) * [Autoloading via Bundler](#autoloading-via-bundler) * [Manually Loading](#manually-loading) * [Load Path Setup](#load-path-setup) * [Conflicts With Other Gems](#conflicts-with-other-gems) * [More Information](#more-information) * [Contributing](#contributing) * [Running Tests](#running-tests) ### Examples #### Introduction Without `activerecord-import`, you'd write something like this: ```ruby 10.times do |i| Book.create! :name => "book #{i}" end ``` This would end up making 10 SQL calls. YUCK! With `activerecord-import`, you can instead do this: ```ruby books = [] 10.times do |i| books << Book.new(:name => "book #{i}") end Book.import books # or use import! ``` and only have 1 SQL call. Much better! #### Columns and Arrays The `import` method can take an array of column names (strings or symbols) and an array of arrays. Each child array represents an individual record and its list of values in the same order as the columns. This is the fastest import mechanism and also the most primitive. ```ruby columns = [ :title, :author ] values = [ ['Book1', 'FooManChu'], ['Book2', 'Bob Jones'] ] # Importing without model validations Book.import columns, values, :validate => false # Import with model validations Book.import columns, values, :validate => true # when not specified :validate defaults to true Book.import columns, values ``` #### Hashes The `import` method can take an array of hashes. The keys map to the column names in the database. ```ruby values = [{ title: 'Book1', author: 'FooManChu' }, { title: 'Book2', author: 'Bob Jones'}] # Importing without model validations Book.import values, validate: false # Import with model validations Book.import values, validate: true # when not specified :validate defaults to true Book.import values ``` #### Import Using Hashes and Explicit Column Names The `import` method can take an array of column names and an array of hash objects. The column names are used to determine what fields of data should be imported. 
The following example will only import books with the `title` field: ```ruby books = [ { title: "Book 1", author: "FooManChu" }, { title: "Book 2", author: "Bob Jones" } ] columns = [ :title ] # without validations Book.import columns, books, validate: false # with validations Book.import columns, books, validate: true # when not specified :validate defaults to true Book.import columns, books # result in table books # title | author #--------|-------- # Book 1 | NULL # Book 2 | NULL ``` Using hashes will only work if the columns are consistent in every hash of the array. If this does not hold, an exception will be raised. There are two workarounds: use the array to instantiate an array of ActiveRecord objects and then pass that into `import` or divide the array into multiple ones with consistent columns and import each one separately. See https://github.com/zdennis/activerecord-import/issues/507 for discussion. ```ruby arr = [ { bar: 'abc' }, { baz: 'xyz' }, { bar: '123', baz: '456' } ] # An exception will be raised Foo.import arr # better arr.map! { |args| Foo.new(args) } Foo.import arr # better arr.group_by(&:keys).each_value do |v| Foo.import v end ``` #### ActiveRecord Models The `import` method can take an array of models. The attributes will be pulled off from each model by looking at the columns available on the model. ```ruby books = [ Book.new(:title => "Book 1", :author => "FooManChu"), Book.new(:title => "Book 2", :author => "Bob Jones") ] # without validations Book.import books, :validate => false # with validations Book.import books, :validate => true # when not specified :validate defaults to true Book.import books ``` The `import` method can take an array of column names and an array of models. The column names are used to determine what fields of data should be imported. The following example will only import books with the `title` field: ```ruby books = [ Book.new(:title => "Book 1", :author => "FooManChu"), Book.new(:title => "Book 2", :author => "Bob Jones") ] columns = [ :title ] # without validations Book.import columns, books, :validate => false # with validations Book.import columns, books, :validate => true # when not specified :validate defaults to true Book.import columns, books # result in table books # title | author #--------|-------- # Book 1 | NULL # Book 2 | NULL ``` #### Batching The `import` method can take a `batch_size` option to control the number of rows to insert per INSERT statement. The default is the total number of records being inserted so there is a single INSERT statement. ```ruby books = [ Book.new(:title => "Book 1", :author => "FooManChu"), Book.new(:title => "Book 2", :author => "Bob Jones"), Book.new(:title => "Book 1", :author => "John Doe"), Book.new(:title => "Book 2", :author => "Richard Wright") ] columns = [ :title ] # 2 INSERT statements for 4 records Book.import columns, books, :batch_size => 2 ``` #### Recursive NOTE: This only works with PostgreSQL. Assume that Books has_many Reviews. ```ruby books = [] 10.times do |i| book = Book.new(:name => "book #{i}") book.reviews.build(:title => "Excellent") books << book end Book.import books, recursive: true ``` ### Options Key | Options | Default | Description ----------------------- | --------------------- | ------------------ | ----------- :validate | `true`/`false` | `true` | Whether or not to run `ActiveRecord` validations (uniqueness skipped). 
:validate_uniqueness | `true`/`false` | `false` | Whether or not to run uniqueness validations, has potential pitfalls, use with caution (requires `>= v0.27.0`). :on_duplicate_key_ignore| `true`/`false` | `false` | Allows skipping records with duplicate keys. See [here](https://github.com/zdennis/activerecord-import/#duplicate-key-ignore) for more details. :ignore | `true`/`false` | `false` | Alias for :on_duplicate_key_ignore. :on_duplicate_key_update| :all, `Array`, `Hash` | N/A | Allows upsert logic to be used. See [here](https://github.com/zdennis/activerecord-import/#duplicate-key-update) for more details. :synchronize | `Array` | N/A | An array of ActiveRecord instances. This synchronizes existing instances in memory with updates from the import. :timestamps | `true`/`false` | `true` | Enables/disables timestamps on imported records. :recursive | `true`/`false` | `false` | Imports has_many/has_one associations (PostgreSQL only). :batch_size | `Integer` | total # of records | Max number of records to insert per import :raise_error | `true`/`false` | `false` | Throws an exception if there are invalid records. `import!` is a shortcut for this. #### Duplicate Key Ignore [MySQL](http://dev.mysql.com/doc/refman/5.0/en/insert-on-duplicate.html), [SQLite](https://www.sqlite.org/lang_insert.html), and [PostgreSQL](https://www.postgresql.org/docs/current/static/sql-insert.html#SQL-ON-CONFLICT) (9.5+) support `on_duplicate_key_ignore` which allows you to skip records if a primary or unique key constraint is violated. For Postgres 9.5+ it adds `ON CONFLICT DO NOTHING`, for MySQL it uses `INSERT IGNORE`, and for SQLite it uses `INSERT OR IGNORE`. Cannot be enabled on a recursive import. For database adapters that normally support setting primary keys on imported objects, this option prevents that from occurring. ```ruby book = Book.create! title: "Book1", author: "FooManChu" book.title = "Updated Book Title" book.author = "Bob Barker" Book.import [book], on_duplicate_key_ignore: true book.reload.title # => "Book1" (stayed the same) book.reload.author # => "FooManChu" (stayed the same) ``` The option `:on_duplicate_key_ignore` is bypassed when `:recursive` is enabled for [PostgreSQL imports](https://github.com/zdennis/activerecord-import/wiki#recursive-example-postgresql-only). #### Duplicate Key Update MySQL, PostgreSQL (9.5+), and SQLite (3.24.0+) support `on duplicate key update` (also known as "upsert") which allows you to specify fields whose values should be updated if a primary or unique key constraint is violated. One big difference between MySQL and PostgreSQL support is that MySQL will handle any conflict that happens, but PostgreSQL requires that you specify which columns the conflict would occur over. SQLite models its upsert support after PostgreSQL. This will use MySQL's `ON DUPLICATE KEY UPDATE` or Postgres/SQLite `ON CONFLICT DO UPDATE` to do upsert. Basic Update ```ruby book = Book.create! title: "Book1", author: "FooManChu" book.title = "Updated Book Title" book.author = "Bob Barker" # MySQL version Book.import [book], on_duplicate_key_update: [:title] # PostgreSQL version Book.import [book], on_duplicate_key_update: {conflict_target: [:id], columns: [:title]} # PostgreSQL shorthand version (conflict target must be primary key) Book.import [book], on_duplicate_key_update: [:title] book.reload.title # => "Updated Book Title" (changed) book.reload.author # => "FooManChu" (stayed the same) ``` Using the value from another column ```ruby book = Book.create! 
title: "Book1", author: "FooManChu" book.title = "Updated Book Title" # MySQL version Book.import [book], on_duplicate_key_update: {author: :title} # PostgreSQL version (no shorthand version) Book.import [book], on_duplicate_key_update: { conflict_target: [:id], columns: {author: :title} } book.reload.title # => "Book1" (stayed the same) book.reload.author # => "Updated Book Title" (changed) ``` Using Custom SQL ```ruby book = Book.create! title: "Book1", author: "FooManChu" book.author = "Bob Barker" # MySQL version Book.import [book], on_duplicate_key_update: "author = values(author)" # PostgreSQL version Book.import [book], on_duplicate_key_update: { conflict_target: [:id], columns: "author = excluded.author" } # PostgreSQL shorthand version (conflict target must be primary key) Book.import [book], on_duplicate_key_update: "author = excluded.author" book.reload.title # => "Book1" (stayed the same) book.reload.author # => "Bob Barker" (changed) ``` PostgreSQL Using constraints ```ruby book = Book.create! title: "Book1", author: "FooManChu", edition: 3, published_at: nil book.published_at = Time.now # in migration execute <<-SQL ALTER TABLE books ADD CONSTRAINT for_upsert UNIQUE (title, author, edition); SQL # PostgreSQL version Book.import [book], on_duplicate_key_update: {constraint_name: :for_upsert, columns: [:published_at]} book.reload.title # => "Book1" (stayed the same) book.reload.author # => "FooManChu" (stayed the same) book.reload.edition # => 3 (stayed the same) book.reload.published_at # => 2017-10-09 (changed) ``` ```ruby Book.import books, validate_uniqueness: true ``` ### Return Info The `import` method returns a `Result` object that responds to `failed_instances` and `num_inserts`. Additionally, for users of Postgres, there will be two arrays `ids` and `results` that can be accessed`. ```ruby articles = [ Article.new(author_id: 1, title: 'First Article', content: 'This is the first article'), Article.new(author_id: 2, title: 'Second Article', content: ''), Article.new(author_id: 3, content: '') ] demo = Article.import(articles), returning: :title # => # [#
demo.num_inserts => 1 demo.ids => ["1", "2"] # for Postgres => [] # for other DBs demo.results => ["First Article", "Second Article"] # for Postgres => [] for other DBs ``` ### Counter Cache When running `import`, `activerecord-import` does not automatically update counter cache columns. To update these columns, you will need to do one of the following: * Provide values to the column as an argument on your object that is passed in. * Manually update the column after the record has been imported. ### ActiveRecord Timestamps If you're familiar with ActiveRecord you're probably familiar with its timestamp columns: created_at, created_on, updated_at, updated_on, etc. When importing data the timestamp fields will continue to work as expected and each timestamp column will be set. Should you wish to set those columns yourself, you may use the option `timestamps: false`. However, it is also possible to set just `:created_at` in specific records. In this case despite using `timestamps: true`, `:created_at` will be updated only in records where that field is `nil`. The same rule applies for record associations when enabling the option `recursive: true`. If you are using custom time zones, these will be respected when performing imports as well, as long as `ActiveRecord::Base.default_timezone` is set, which it is for practically all Rails apps. ### Callbacks ActiveRecord callbacks related to [creating](http://guides.rubyonrails.org/active_record_callbacks.html#creating-an-object), [updating](http://guides.rubyonrails.org/active_record_callbacks.html#updating-an-object), or [destroying](http://guides.rubyonrails.org/active_record_callbacks.html#destroying-an-object) records (other than `before_validation` and `after_validation`) will NOT be called when calling the import method. This is because it is mass importing rows of data and doesn't necessarily have access to in-memory ActiveRecord objects. If you do have a collection of in-memory ActiveRecord objects you can do something like this: ```ruby books.each do |book| book.run_callbacks(:save) { false } book.run_callbacks(:create) { false } end Book.import(books) ``` This will run before_create and before_save callbacks on each item. The `false` argument is needed to prevent after_save being run, which wouldn't make sense prior to bulk import. Something to note in this example is that the before_create and before_save callbacks will run before the validation callbacks. If that is an issue, another possible approach is to loop through your models first to do validations and then only run callbacks on and import the valid models. ```ruby valid_books = [] invalid_books = [] books.each do |book| if book.valid? 
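# partition into valid and invalid up front so the callbacks below run only for records that will be imported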
valid_books << book else invalid_books << book end end valid_books.each do |book| book.run_callbacks(:save) { false } book.run_callbacks(:create) { false } end Book.import valid_books, validate: false ``` ### Supported Adapters The following database adapters are currently supported: * MySQL - supports core import functionality plus on duplicate key update support (included in activerecord-import 0.1.0 and higher) * MySQL2 - supports core import functionality plus on duplicate key update support (included in activerecord-import 0.2.0 and higher) * PostgreSQL - supports core import functionality (included in activerecord-import 0.1.0 and higher) * SQLite3 - supports core import functionality (included in activerecord-import 0.1.0 and higher) * Oracle - supports core import functionality through DML trigger (available as an external gem: [activerecord-import-oracle_enhanced](https://github.com/keeguon/activerecord-import-oracle_enhanced)) * SQL Server - supports core import functionality (available as an external gem: [activerecord-import-sqlserver](https://github.com/keeguon/activerecord-import-sqlserver)) If your adapter isn't listed here, please consider creating an external gem as described below to provide support. If you do, feel free to update this wiki to include a link to the new adapter's repository! To test which features are supported by your adapter, use the following methods on a model class: * `supports_import?(*args)` * `supports_on_duplicate_key_update?` * `supports_setting_primary_key_of_imported_objects?` ### Additional Adapters Additional adapters can be provided by gems external to activerecord-import by providing an adapter that matches the naming convention set up by activerecord-import (and subsequently activerecord) for dynamically loading adapters. This involves also providing a folder on the load path that follows the activerecord-import naming convention to allow activerecord-import to dynamically load the file. When `ActiveRecord::Import.require_adapter("fake_name")` is called the require will be: ```ruby require 'activerecord-import/active_record/adapters/fake_name_adapter' ``` This allows an external gem to dynamically add an adapter without the need to add any file/code to the core activerecord-import gem. ### Requiring Note: These instructions will only work if you are using version 0.2.0 or higher. #### Autoloading via Bundler If you are using Rails or otherwise autoload your dependencies via Bundler, all you need to do is add the gem to your `Gemfile` like so: ```ruby gem 'activerecord-import' ``` #### Manually Loading You may want to manually load activerecord-import for one reason or another. First, add the `require: false` argument like so: ```ruby gem 'activerecord-import', require: false ``` This will allow you to load up activerecord-import in the file or files where you are using it and only load the parts you need. 
If you are doing this within Rails and ActiveRecord has established a database connection (such as within a controller), you will need to do extra initialization work: ```ruby require 'activerecord-import/base' # load the appropriate database adapter (postgresql, mysql2, sqlite3, etc) require 'activerecord-import/active_record/adapters/postgresql_adapter' ``` If your gem dependencies aren’t autoloaded, and your script will be establishing a database connection, then simply require activerecord-import after ActiveRecord has been loaded, i.e.: ```ruby require 'active_record' require 'activerecord-import' ``` ### Load Path Setup To understand how rubygems loads code you can reference the following: http://guides.rubygems.org/patterns/#loading_code And an example of how active_record dynamically loads adapters: https://github.com/rails/rails/blob/master/activerecord/lib/active_record/connection_adapters/connection_specification.rb In summary, when a gem is loaded rubygems adds the `lib` folder of the gem to the global load path `$LOAD_PATH` so that all `require` lookups will now propagate through all of the folders on the load path. When a `require` is issued each folder on the `$LOAD_PATH` is checked for the file and/or folder referenced. This allows a gem (like activerecord-import) to push the activerecord-import folder (or namespace) onto the `$LOAD_PATH`, and any adapters provided by activerecord-import will be found by rubygems when the require is issued. If a `fake_name` adapter is needed by a gem (potentially called `activerecord-import-fake_name`) then the folder structure should look as follows: ```bash activerecord-import-fake_name/ |-- activerecord-import-fake_name.gemspec |-- lib | |-- activerecord-import-fake_name.rb | |-- activerecord-import-fake_name | | |-- version.rb | |-- activerecord-import | | |-- active_record | | | |-- adapters | | | |-- fake_name_adapter.rb ``` When rubygems pushes the `lib` folder onto the load path a `require` will now find `activerecord-import/active_record/adapters/fake_name_adapter` as it runs through the lookup process for a ruby file under that path in `$LOAD_PATH`. (A sketch of what `fake_name_adapter.rb` itself might contain appears just before the Contributing section below.) ### Conflicts With Other Gems `activerecord-import` adds the `.import` method onto `ActiveRecord::Base`. There are other gems, such as `elasticsearch-rails`, that do the same thing. In conflicts such as this, there is an aliased method named `.bulk_import` that can be used interchangeably. If you are using the `apartment` gem, there is a weird triple interaction between that gem, `activerecord-import`, and `activerecord` involving caching of the `sequence_name` of a model. This can be worked around by explicitly setting this value within the model. For example: ```ruby class Post < ActiveRecord::Base self.sequence_name = "posts_seq" end ``` Another way to work around the issue is to call `.reset_sequence_name` on the model. For example: ```ruby schemas.all.each do |schema| Apartment::Tenant.switch! schema.name ActiveRecord::Base.transaction do Post.reset_sequence_name Post.import posts end end ``` See https://github.com/zdennis/activerecord-import/issues/233 for further discussion. ### More Information For more information on activerecord-import please see its wiki: https://github.com/zdennis/activerecord-import/wiki To document new information, please add to the README instead of the wiki. See https://github.com/zdennis/activerecord-import/issues/397 for discussion. 
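To make the Load Path Setup example above concrete, here is what the `fake_name_adapter.rb` shim in such a gem might contain, modeled on the real adapter shims under `lib/activerecord-import/active_record/adapters/` shown earlier (a sketch; the `FakeName` constants and require paths are assumptions for a hypothetical adapter):

```ruby
# lib/activerecord-import/active_record/adapters/fake_name_adapter.rb (hypothetical)
require "active_record/connection_adapters/fake_name_adapter"  # the adapter itself
require "activerecord-import/adapters/fake_name_adapter"       # the import mixin shipped by the external gem

class ActiveRecord::ConnectionAdapters::FakeNameAdapter
  include ActiveRecord::Import::FakeNameAdapter
end
```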
### Contributing #### Running Tests The first thing you need to do is set up your database(s): * copy `test/database.yml.sample` to `test/database.yml` * modify `test/database.yml` for your database settings * create databases as needed After that, you can run the tests. They run against multiple databases and ActiveRecord versions. This is one example of how to run the tests: ```bash rm Gemfile.lock AR_VERSION=4.2 bundle install AR_VERSION=4.2 bundle exec rake test:postgresql test:sqlite3 test:mysql2 ``` Once you have pushed up your changes, you can find your CI results [here](https://travis-ci.org/zdennis/activerecord-import/). # License This is licensed under the ruby license. # Author Zach Dennis (zach.dennis@gmail.com) # Contributors * Jordan Owens (@jkowens) * Erik Michaels-Ober (@sferik) * Blythe Dunham * Gabe da Silveira * Henry Work * James Herdman * Marcus Crafter * Thibaud Guillaume-Gentil * Mark Van Holstyn * Victor Costan * Dillon Welch activerecord-import-0.28.2/Gemfile0000644000004100000410000000206013424140671017110 0ustar www-datawww-datasource 'https://rubygems.org' gemspec version = ENV['AR_VERSION'].to_f mysql2_version = '0.3.0' mysql2_version = '0.4.0' if version >= 4.2 group :development, :test do gem 'rubocop', '~> 0.40.0' gem 'rake' end # Database Adapters platforms :ruby do gem "mysql2", "~> #{mysql2_version}" gem "pg", "~> 0.9" gem "sqlite3", "~> 1.3.10" gem "seamless_database_pool", "~> 1.0.20" end platforms :jruby do gem "jdbc-mysql" gem "jdbc-postgres" gem "activerecord-jdbcsqlite3-adapter", "~> 1.3" gem "activerecord-jdbcmysql-adapter", "~> 1.3" gem "activerecord-jdbcpostgresql-adapter", "~> 1.3" end # Support libs gem "factory_bot" gem "timecop" gem "chronic" gem "mocha", "~> 1.3.0" # Debugging platforms :jruby do gem "ruby-debug", "= 0.10.4" end platforms :mri_19 do gem "debugger" end platforms :ruby do gem "pry-byebug" gem "rb-readline" end if version >= 4.0 gem "minitest" else gem "test-unit" end eval_gemfile File.expand_path("../gemfiles/#{version}.gemfile", __FILE__) activerecord-import-0.28.2/benchmarks/0000755000004100000410000000000013424140671017734 5ustar www-datawww-dataactiverecord-import-0.28.2/benchmarks/schema/0000755000004100000410000000000013424140671021174 5ustar www-datawww-dataactiverecord-import-0.28.2/benchmarks/schema/mysql_schema.rb0000644000004100000410000000100213424140671024203 0ustar www-datawww-dataActiveRecord::Schema.define do create_table :test_myisam, options: 'ENGINE=MyISAM', force: true do |t| t.column :my_name, :string, null: false t.column :description, :string end create_table :test_innodb, options: 'ENGINE=InnoDb', force: true do |t| t.column :my_name, :string, null: false t.column :description, :string end create_table :test_memory, options: 'ENGINE=Memory', force: true do |t| t.column :my_name, :string, null: false t.column :description, :string end end activerecord-import-0.28.2/benchmarks/models/0000755000004100000410000000000013424140671021217 5ustar www-datawww-dataactiverecord-import-0.28.2/benchmarks/models/test_myisam.rb0000644000004100000410000000011413424140671024076 0ustar www-datawww-dataclass TestMyISAM < ActiveRecord::Base self.table_name = 'test_myisam' end activerecord-import-0.28.2/benchmarks/models/test_memory.rb0000644000004100000410000000011413424140671024107 0ustar www-datawww-dataclass TestMemory < ActiveRecord::Base self.table_name = 'test_memory' end activerecord-import-0.28.2/benchmarks/models/test_innodb.rb0000644000004100000410000000011413424140671024050 0ustar www-datawww-dataclass TestInnoDb < 
ActiveRecord::Base self.table_name = 'test_innodb' end activerecord-import-0.28.2/benchmarks/benchmark.rb0000644000004100000410000000403713424140671022217 0ustar www-datawww-datarequire 'pathname' require "fileutils" require "active_record" require "active_record/base" benchmark_dir = File.dirname(__FILE__) $LOAD_PATH.unshift('.') # Get the gem into the load path $LOAD_PATH.unshift(File.join(benchmark_dir, '..', 'lib')) # Load the benchmark files Dir[File.join( benchmark_dir, 'lib', '*.rb' )].sort.each { |f| require f } # Parse the options passed in via the command line options = BenchmarkOptionParser.parse( ARGV ) FileUtils.mkdir_p 'log' ActiveRecord::Base.configurations["test"] = YAML.load_file(File.join(benchmark_dir, "../test/database.yml"))[options.adapter] ActiveRecord::Base.logger = Logger.new("log/test.log") ActiveRecord::Base.logger.level = Logger::DEBUG ActiveRecord::Base.default_timezone = :utc require "activerecord-import" ActiveRecord::Base.establish_connection(:test) ActiveSupport::Notifications.subscribe(/active_record.sql/) do |_, _, _, _, hsh| ActiveRecord::Base.logger.info hsh[:sql] end # Load base/generic schema require File.join(benchmark_dir, "../test/schema/version") require File.join(benchmark_dir, "../test/schema/generic_schema") adapter_schema = File.join(benchmark_dir, "schema/#{options.adapter}_schema.rb") require adapter_schema if File.exist?(adapter_schema) Dir[File.dirname(__FILE__) + "/models/*.rb"].each { |file| require file } require File.join( benchmark_dir, 'lib', "#{options.adapter}_benchmark" ) table_types = nil table_types = if options.benchmark_all_types ["all"] else options.table_types.keys end letter = options.adapter[0].chr clazz_str = letter.upcase + options.adapter[1..-1].downcase clazz = Object.const_get( clazz_str + "Benchmark" ) benchmarks = [] options.number_of_objects.each do |num| benchmarks << (benchmark = clazz.new) benchmark.send( "benchmark", table_types, num ) end options.outputs.each do |output| format = output.format.downcase output_module = Object.const_get( "OutputTo#{format.upcase}" ) benchmarks.each do |benchmark| output_module.output_results( output.filename, benchmark.results ) end end puts puts "Done with benchmark!" activerecord-import-0.28.2/benchmarks/lib/0000755000004100000410000000000013424140671020502 5ustar www-datawww-dataactiverecord-import-0.28.2/benchmarks/lib/base.rb0000644000004100000410000001106013424140671021737 0ustar www-datawww-dataclass BenchmarkBase attr_reader :results # The main benchmark method dispatcher. This dispatches the benchmarks # to actual benchmark_xxxx methods. # # == PARAMETERS # * table_types - an array of table types to benchmark # * num - the number of record insertions to test def benchmark( table_types, num ) array_of_cols_and_vals = build_array_of_cols_and_vals( num ) table_types.each do |table_type| send( "benchmark_#{table_type}", array_of_cols_and_vals ) end end # Returns an OpenStruct which contains the attributes +description+, +tms+, and +failed+ after performing an # actual benchmark. 
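# An illustrative use (a sketch; any block can be timed this way):
#
#   result = bm( "TestInnoDb.create (100 records)" ) do
#     100.times { |i| TestInnoDb.create my_name: "My Name #{i}" }
#   end
#   result.tms.real # => wall-clock seconds spent in the block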
# # == PARAMETERS # * description - the description of the block that is getting benchmarked # * blk - the block of code to benchmark # # == RETURNS # An OpenStruct object with the following attributes: # * description - the description of the benchmark that ran # * tms - a Benchmark::Tms containing the results of the benchmark # * failed - whether the benchmark failed (currently always false; checked by the output formatters) def bm( description ) tms = nil puts "Benchmarking #{description}" Benchmark.bm { |x| tms = x.report { yield } } delete_all failed = false OpenStruct.new description: description, tms: tms, failed: failed end # Given a model class (ie: Topic), and an array of columns and value sets # this will perform all of the benchmarks necessary for this library. # # == PARAMETERS # * model_clazz - the model class to benchmark (ie: Topic) # * array_of_cols_and_vals - an array of column identifiers and value sets # # == RETURNS # returns true def bm_model( model_clazz, array_of_cols_and_vals ) puts puts "------ Benchmarking #{model_clazz.name} -------" cols, vals = array_of_cols_and_vals num_inserts = vals.size # add a new result group for this particular benchmark group = [] @results << group description = "#{model_clazz.name}.create (#{num_inserts} records)" group << bm( description ) do vals.each do |values| model_clazz.create create_hash_for_cols_and_vals( cols, values ) end end description = "#{model_clazz.name}.import(column, values) for #{num_inserts} records with validations" group << bm( description ) { model_clazz.import cols, vals, validate: true } description = "#{model_clazz.name}.import(columns, values) for #{num_inserts} records without validations" group << bm( description ) { model_clazz.import cols, vals, validate: false } models = [] array_of_attrs = [] vals.each do |arr| array_of_attrs << (attrs = {}) arr.each_with_index { |value, i| attrs[cols[i]] = value } end array_of_attrs.each { |attrs| models << model_clazz.new(attrs) } description = "#{model_clazz.name}.import(models) for #{num_inserts} records with validations" group << bm( description ) { model_clazz.import models, validate: true } description = "#{model_clazz.name}.import(models) for #{num_inserts} records without validations" group << bm( description ) { model_clazz.import models, validate: false } true end # Returns a two-element array composed of an array of columns and an array of # value sets given the passed +num+. # # === What is a value set? # A value set is an array of arrays. Each child array represents an array of value sets # for a given row of data. # # For example, say we wanted to represent an insertion of two records: # column_names = [ 'id', 'name', 'description' ] # record1 = [ 1, 'John Doe', 'A plumber' ] # record2 = [ 2, 'John Smith', 'A painter' ] # value_set [ record1, record2 ] # # == PARAMETER # * num - the number of records to create def build_array_of_cols_and_vals( num ) cols = [:my_name, :description] value_sets = [] num.times { |i| value_sets << ["My Name #{i}", "My Description #{i}"] } [cols, value_sets] end # Returns a hash of column identifier to value mappings given the passed in # value array. # # Example: # cols = [ 'id', 'name', 'description' ] # values = [ 1, 'John Doe', 'A plumber' ] # hsh = create_hash_for_cols_and_vals( cols, values ) # # hsh => { 'id'=>1, 'name'=>'John Doe', 'description'=>'A plumber' } def create_hash_for_cols_and_vals( cols, vals ) h = {} cols.zip( vals ) { |col, val| h[col] = val } h end # Deletes all records from all ActiveRecord subclasses def delete_all ActiveRecord::Base.send( :subclasses ).each do |subclass| if subclass.table_exists? 
&& subclass.respond_to?(:delete_all) subclass.delete_all end end end def initialize # :nodoc: @results = [] end end activerecord-import-0.28.2/benchmarks/lib/output_to_html.rb0000644000004100000410000000320513424140671024115 0ustar www-datawww-datarequire 'erb' module OutputToHTML TEMPLATE_HEADER = <<"EOT".freeze
    <div>
      All times are rounded to the nearest thousandth for display purposes. Speedups next to each time are computed
      before any rounding occurs. Also, all speedup calculations are computed by comparing a given time against the
      very first column (which is always the default ActiveRecord::Base.create method).
    </div>
EOT

  TEMPLATE = <<"EOT".freeze
    <table>
      <tr>
        <% columns.each do |col| %>
          <td><%= col %></td>
        <% end %>
      </tr>
      <tr>
        <% times.each do |time| %>
          <td><%= time %></td>
        <% end %>
      </tr>
      <tr>
        <td>&nbsp;</td>
      </tr>
    </table>
EOT def self.output_results( filename, results ) html = '' results.each do |result_set| columns = [] times = [] result_set.each do |result| columns << result.description if result.failed times << "failed" else time = result.tms.real.round_to( 3 ) speedup = ( result_set.first.tms.real / result.tms.real ).round times << (result == result_set.first ? time.to_s : "#{time} (#{speedup}x speedup)") end end template = ERB.new( TEMPLATE, 0, "%<>") html << template.result( binding ) end File.open( filename, 'w' ) { |file| file.write( TEMPLATE_HEADER + html ) } end end activerecord-import-0.28.2/benchmarks/lib/float.rb0000644000004100000410000000043213424140671022133 0ustar www-datawww-data# Taken from http://www.programmingishard.com/posts/show/128 # Posted by rbates class Float def round_to(x) (self * 10**x).round.to_f / 10**x end def ceil_to(x) (self * 10**x).ceil.to_f / 10**x end def floor_to(x) (self * 10**x).floor.to_f / 10**x end end activerecord-import-0.28.2/benchmarks/lib/cli_parser.rb0000644000004100000410000000625013424140671023155 0ustar www-datawww-datarequire 'optparse' require 'ostruct' # # == PARAMETERS # * a - database adapter. ie: mysql, postgresql, oracle, etc. # * n - number of objects to test with. ie: 1, 100, 1000, etc. # * t - the table types to test. ie: myisam, innodb, memory, temporary, etc. # module BenchmarkOptionParser BANNER = "Usage: ruby #{$0} [options]\nSee ruby #{$0} -h for more options.".freeze def self.print_banner puts BANNER end def self.print_banner! print_banner exit end def self.print_options( options ) puts "Benchmarking the following options:" puts " Database adapter: #{options.adapter}" puts " Number of objects: #{options.number_of_objects}" puts " Table types:" print_valid_table_types( options, prefix: " " ) end def self.print_valid_table_types( options, hsh = { prefix: '' } ) if !options.table_types.keys.empty? options.table_types.keys.sort.each { |type| puts hsh[:prefix].to_s + type.to_s } else puts 'No table types defined.' end end def self.parse( args ) options = OpenStruct.new( adapter: 'mysql2', table_types: {}, delete_on_finish: true, number_of_objects: [], outputs: [] ) opt_parser = OptionParser.new do |opts| opts.banner = BANNER # parse the database adapter opts.on( "-a", "--adapter [String]", "The database adapter to use. IE: mysql, postgresql, oracle" ) do |arg| options.adapter = arg end # parse do_not_delete flag opts.on( "-d", "--do-not-delete", "By default all records in the benchmark tables will be deleted at the end of the benchmark. " \ "This flag indicates not to delete the benchmark data." ) do |_| options.delete_on_finish = false end # parse the number of row objects to test opts.on( "-n", "--num [Integer]", "The number of objects to benchmark." ) do |arg| options.number_of_objects << arg.to_i end # parse the table types to test opts.on( "-t", "--table-type [String]", "The table type to test. This can be used multiple times." ) do |arg| if arg =~ /^all$/ options.table_types['all'] = options.benchmark_all_types = true else options.table_types[arg] = true end end # print results in CSV format opts.on( "--to-csv [String]", "Print results in a CSV file format" ) do |filename| options.outputs << OpenStruct.new( format: 'csv', filename: filename ) end # print results in HTML format opts.on( "--to-html [String]", "Print results in HTML format" ) do |filename| options.outputs << OpenStruct.new( format: 'html', filename: filename ) end end # end OptionParser.new begin opt_parser.parse!( args ) if options.table_types.empty? 
options.table_types['all'] = options.benchmark_all_types = true end rescue StandardError print_banner! end options.number_of_objects = [1000] if options.number_of_objects.empty? options.outputs = [OpenStruct.new( format: 'html', filename: 'benchmark.html' )] if options.outputs.empty? print_options( options ) options end end activerecord-import-0.28.2/benchmarks/lib/mysql2_benchmark.rb0000644000004100000410000000113513424140671024270 0ustar www-datawww-dataclass Mysql2Benchmark < BenchmarkBase def benchmark_all( array_of_cols_and_vals ) methods = self.methods.find_all { |m| m =~ /benchmark_/ } methods.delete_if { |m| m =~ /benchmark_(all|model)/ } methods.each { |method| send( method, array_of_cols_and_vals ) } end def benchmark_myisam( array_of_cols_and_vals ) bm_model( TestMyISAM, array_of_cols_and_vals ) end def benchmark_innodb( array_of_cols_and_vals ) bm_model( TestInnoDb, array_of_cols_and_vals ) end def benchmark_memory( array_of_cols_and_vals ) bm_model( TestMemory, array_of_cols_and_vals ) end end activerecord-import-0.28.2/benchmarks/lib/output_to_csv.rb0000644000004100000410000000071013424140671023742 0ustar www-datawww-datarequire 'csv' module OutputToCSV def self.output_results( filename, results ) CSV.open( filename, 'w' ) do |csv| # Iterate over each result set, which contains many results results.each do |result_set| columns = [] times = [] result_set.each do |result| columns << result.description times << result.tms.real end csv << columns csv << times end end end end activerecord-import-0.28.2/benchmarks/README0000644000004100000410000000217513424140671020621 0ustar www-datawww-dataTo run the benchmarks, from within the benchmarks directory, run: ruby benchmark.rb [options] The following options are supported: --adapter [String] The database adapter to use. IE: mysql, postgresql, oracle --do-not-delete By default all records in the benchmark tables will be deleted at the end of the benchmark. This flag indicates not to delete the benchmark data. --num [Integer] The number of objects to benchmark. (Defaults to 1000 if not given.) --table-type [String] The table type to test. This can be used multiple times. By default it is all table types. --to-csv [String] Print results in a CSV file format --to-html [String] Print results in HTML format (String filename must be supplied) See "ruby benchmark.rb -h" for the complete listing of options. EXAMPLES -------- To output to HTML format: ruby benchmark.rb --adapter=mysql2 --to-html=results.html To output to CSV format: ruby benchmark.rb --adapter=mysql2 --to-csv=results.csv LIMITATIONS ----------- Currently MySQL is the only supported adapter to benchmark. AUTHOR ------ Zach Dennis zach.dennis@gmail.com http://www.continuousthinking.com
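
ADDITIONAL EXAMPLE
------------------
A hypothetical invocation (not taken from the original README) that exercises
the repeatable flags: --num and --table-type may each be given more than once,
so the following benchmarks two batch sizes against only the InnoDB table type
and writes the results to a CSV file:

  ruby benchmark.rb --adapter=mysql2 --num=100 --num=1000 --table-type=innodb --to-csv=results.csv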